hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
22bb472a9580fa95c68ad07011b9d30ede27e92c
| 14,244
|
py
|
Python
|
ColorMaker.py
|
Ljuka/iwen
|
6aee69bf46c14e301002d0465a8a2b7e74e02953
|
[
"MIT"
] | 1
|
2019-03-14T17:02:46.000Z
|
2019-03-14T17:02:46.000Z
|
ColorMaker.py
|
Ljuka/iwen
|
6aee69bf46c14e301002d0465a8a2b7e74e02953
|
[
"MIT"
] | null | null | null |
ColorMaker.py
|
Ljuka/iwen
|
6aee69bf46c14e301002d0465a8a2b7e74e02953
|
[
"MIT"
] | null | null | null |
import os
import shutil
import re
import math
def createStartFolderStructure(path):
    """Reset and recreate the 'Iwen Colours' output tree under *path*.

    Any output left over from a previous run is deleted first, then the
    IOS (with its Colours.xcassets catalog) and Android subfolders are
    created fresh.
    """
    root = path + '/Iwen Colours'
    # Start from a clean slate if a previous run left output behind.
    if os.path.isdir(root):
        shutil.rmtree(root)
    # Same creation order as before: root, IOS, xcassets, Android.
    for suffix in ('', '/IOS', '/IOS/Colours.xcassets', '/Android'):
        os.mkdir(root + suffix)
def makeColourCodeForIOS(path, colours):
    """Write Colours.swift: a UIColor extension with one computed var per colour.

    Font-colour entries are skipped. Gradient colours emit one var per stop,
    suffixed with the 1-based stop index. Unnamed colours fall back to a
    name derived from their concatenated hex channel values.

    Fix vs. original: the output file handle was never closed; a ``with``
    block now guarantees it is flushed and released.
    """
    with open(path + '/Iwen Colours/IOS/Colours.swift', 'w+') as file:
        file.write('// Colour palette generated by Iwen \r\nimport UIKit\r\n \r\nextension UIColor { \r\n\r\n')
        for colour in colours:
            content = colour['representations'][0]['content']
            # Font colours are styling metadata, not palette entries.
            if 'fontColor' in content:
                continue
            # NOTE(review): for gradients *content* is a list, so the 'alpha'
            # membership test is against list elements and the "1.000"
            # default is always used — behaviour preserved from the original.
            alpha = "1.000" if 'alpha' not in content else round(content['alpha'], 3)
            if colour['type'].find("gradient") == -1:
                value = content['value'] if 'value' in content else content[0]['color']['value']
                r, g, b = value['r'], value['g'], value['b']
                # Fall back to a hex-derived name when the colour is unnamed.
                if colour['name'] != '':
                    colourName = trimName(colour['name'])
                else:
                    colourName = 'colour_' + str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                file.write('\t@nonobjc class var ' + colourName + ': UIColor {\r\n\t\treturn UIColor(red: ' + str(r) + ', green: ' + str(g) + ', blue: ' + str(b) + ', alpha: ' + str(alpha) + ')\r\n\t}\r\n\r\n')
            else:
                # One var per gradient stop, numbered from 1.
                for br, gradient in enumerate(content, 1):
                    value = gradient['color']['value']
                    r, g, b = value['r'], value['g'], value['b']
                    if colour['name'] != '':
                        colourName = trimName(colour['name']) + str(br)
                    else:
                        colourName = 'gradient_colour_' + str(br) + '_' + str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                    file.write('\t@nonobjc class var ' + colourName + ': UIColor {\r\n\t\treturn UIColor(red: ' + str(r) + ', green: ' + str(g) + ', blue: ' + str(b) + ', alpha: ' + str(alpha) + ')\r\n\t}\r\n\r\n')
        file.write('}')
def _writeColorsetContents(path, colourName, r, g, b, alpha):
    # Create <colourName>.colorset under Colours.xcassets and write its Contents.json.
    folder = path + '/Iwen Colours/IOS/Colours.xcassets/' + colourName + '.colorset'
    os.mkdir(folder)
    with open(folder + '/Contents.json', 'w+') as fileInColorset:
        fileInColorset.write('{\r\n\t"info" : {\r\n\t\t"version" : 1,\r\n\t\t"author" : "xcode"\r\n},\r\n\t"colors" : [\r\n\t\t{\r\n\t\t\t"idiom" : "universal",\r\n\t\t\t"color" : {\r\n\t\t\t\t"color-space" : "srgb",\r\n\t\t\t\t"components" : {')
        fileInColorset.write('\r\n\t\t\t\t\t"red" : "' + str(r) + '",\r\n\t\t\t\t\t"alpha" : "' + str(alpha) + '",\r\n\t\t\t\t\t"blue" : "' + str(b) + '",\r\n\t\t\t\t\t"green" : "' + str(g) + '"\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t]\r\n}')


def makeColourPaletteXcassets(path, colours):
    """Populate Colours.xcassets with one .colorset folder per colour.

    Font-colour entries are skipped; gradient colours get one colorset per
    stop, suffixed with the 1-based stop index.

    Fixes vs. original: file handles are now closed via ``with``, and the
    colorset-writing code (duplicated for plain and gradient colours) is
    factored into _writeColorsetContents.
    """
    # Top-level Contents.json for the asset catalog itself.
    with open(path + '/Iwen Colours/IOS/Colours.xcassets/Contents.json', 'w+') as file:
        file.write('{\r\n\t"info" : {\r\n\t\t"version" : 1,\r\n\t\t"author" : "xcode"\r\n\t}\r\n}')
    for colour in colours:
        content = colour['representations'][0]['content']
        # Font colours are styling metadata, not palette entries.
        if 'fontColor' in content:
            continue
        # NOTE(review): for gradients *content* is a list, so 'alpha' is
        # never found and the "1.000" default applies — preserved as-is.
        alpha = "1.000" if 'alpha' not in content else round(content['alpha'], 3)
        if colour['type'].find("gradient") == -1:
            value = content['value'] if 'value' in content else content[0]['color']['value']
            r, g, b = value['r'], value['g'], value['b']
            # Fall back to a hex-derived name when the colour is unnamed.
            if colour['name'] != '':
                colourName = trimName(colour['name'])
            else:
                colourName = 'colour_' + str(convertToHex(r) + convertToHex(g) + convertToHex(b))
            _writeColorsetContents(path, colourName, r, g, b, alpha)
        else:
            for br, gradient in enumerate(content, 1):
                value = gradient['color']['value']
                r, g, b = value['r'], value['g'], value['b']
                if colour['name'] != '':
                    colourName = trimName(colour['name']) + str(br)
                else:
                    colourName = 'gradient_colour_' + str(br) + '_' + str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                _writeColorsetContents(path, colourName, r, g, b, alpha)
def makeColourCodeForAndroid(path, colours):
    """Write an Android colors.xml resource file from the parsed colour list.

    Each colour becomes a <color> entry in #AARRGGBB form; gradient colours
    emit one entry per stop, suffixed with the 1-based stop index.

    Fixes vs. original:
    - the file opened with a stray '</resources>' (no opening tag) and a
      '//' comment placed before the XML declaration — the output was not
      valid XML; the comment is now a proper XML comment after the
      declaration and the opening '<resources>' tag is written;
    - concatenating the non-string alpha (a rounded float whenever the
      colour carried an explicit alpha, and the literal "1.000" otherwise)
      into hexColour produced invalid colours / TypeError; the alpha is now
      emitted as a two-digit hex component;
    - the file handle is closed via ``with``.
    """
    with open(path + '/Iwen Colours/Android/colors.xml', 'w+') as file:
        file.write('<?xml version="1.0" encoding="utf-8"?>\r\n<!-- Colours generated by Iwen -->\r\n<resources>')
        for colour in colours:
            content = colour['representations'][0]['content']
            # Font colours are styling metadata, not palette entries.
            if 'fontColor' in content:
                continue
            # NOTE(review): for gradients *content* is a list, so 'alpha' is
            # never found and the opaque default applies — preserved as-is.
            alphaNum = 1.0 if 'alpha' not in content else round(content['alpha'], 3)
            alphaHex = convertToHex(int(round(alphaNum * 255)))
            if colour['type'].find("gradient") == -1:
                value = content['value'] if 'value' in content else content[0]['color']['value']
                r, g, b = value['r'], value['g'], value['b']
                # Colour in #AARRGGBB format.
                hexColour = "#" + alphaHex + str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                # Fall back to a hex-derived name when the colour is unnamed.
                if colour['name'] != '':
                    colourName = trimName(colour['name'])
                else:
                    colourName = 'color_' + hexColour.replace('#', '')
                file.write('\r\n\t<color name="' + colourName + '">' + hexColour + '</color>')
            else:
                for br, gradient in enumerate(content, 1):
                    value = gradient['color']['value']
                    r, g, b = value['r'], value['g'], value['b']
                    hexColour = "#" + alphaHex + str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                    if colour['name'] != '':
                        colourName = trimName(colour['name']) + str(br)
                    else:
                        colourName = 'color_' + hexColour.replace('#', '') + str(br)
                    file.write('\r\n\t<color name="' + colourName + '">' + hexColour + '</color>')
        file.write('\r\n</resources>')
def makeJsonFile(path, colours):
    """Write colours.json: a JSON array of {name, r, g, b, a} colour records.

    Font-colour entries are skipped; gradient colours emit one record per
    stop with a 1-based index suffix on the name.

    Fixes vs. original: the file handle is closed via ``with``, and *alpha*
    (a rounded float whenever the colour carries an explicit alpha) is
    passed through str() before concatenation — the original raised
    TypeError on any such colour.
    """
    with open(path + '/colours.json', 'w+') as file:
        file.write('[')
        first = True  # tracks whether a separating comma is needed
        for colour in colours:
            content = colour['representations'][0]['content']
            # Font colours are styling metadata, not palette entries.
            if 'fontColor' in content:
                continue
            # NOTE(review): for gradients *content* is a list, so 'alpha' is
            # never found and the "1.000" default applies — preserved as-is.
            alpha = "1.000" if 'alpha' not in content else round(content['alpha'], 3)
            if colour['type'].find("gradient") == -1:
                value = content['value'] if 'value' in content else content[0]['color']['value']
                r, g, b = value['r'], value['g'], value['b']
                hexColour = str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                # Fall back to a hex-derived name when the colour is unnamed.
                colourName = trimName(colour['name']) if colour['name'] != '' else 'color_' + hexColour
                if not first:
                    file.write(',')
                first = False
                file.write('\r\n\t{ "name": "' + colourName + '", "r": "' + str(r) + '", "g": "' + str(g) + '", "b": "' + str(b) + '", "a": "' + str(alpha) + '" }')
            else:
                for br, gradient in enumerate(content, 1):
                    value = gradient['color']['value']
                    r, g, b = value['r'], value['g'], value['b']
                    hexColour = str(convertToHex(r) + convertToHex(g) + convertToHex(b))
                    colourName = trimName(colour['name']) + str(br) if colour['name'] != '' else 'color_' + hexColour + str(br)
                    if not first:
                        file.write(',')
                    first = False
                    file.write('\r\n\t{ "name": "' + colourName + '", "r": "' + str(r) + '", "g": "' + str(g) + '", "b": "' + str(b) + '", "a": "' + str(alpha) + '" }')
        file.write('\r\n]')
def makeClrFile(path):
    """Run the bundled Swift script to build a .clr palette from colours.json.

    Returns "OK" on success, "X" when xcode-select reports missing tooling,
    otherwise the raw tool output for the caller to surface. The temporary
    colours.json is removed regardless of outcome.

    Fix vs. original: the pipe object returned by os.popen was never
    closed; it is closed after reading now.

    NOTE(review): os.popen builds a shell string, so a *path* containing
    quotes or shell metacharacters could break or inject into the command;
    the escaped space ('Iwen\ Colours') only covers the literal-space case.
    Consider subprocess.run with a list argument.
    """
    command = 'swift -suppress-warnings makeClrFile.swift -n "iwenColours" -i '+path+'/colours.json -o '+path+'/Iwen\ Colours/IOS'
    process = os.popen(command)
    results = str(process.read())
    process.close()  # release the pipe; the original leaked it
    os.remove(path + "/colours.json")
    if results == "SUCCESS\n":
        return "OK"
    if results.split(' ', 1)[0] == "xcode-select:":
        # Xcode command-line tools are missing/unconfigured.
        return "X"
    return results
def convertToHex(decimalNumber):
    """Convert a 0-255 colour channel value to a two-digit lowercase hex string.

    Values <= 0 clamp to "00" and values >= 255 clamp to "ff".

    Fix vs. original: hex(n).lstrip("0x") produced a single digit for
    values 1-15 (e.g. 12 -> 'c'), corrupting the 6-digit colour strings
    built by concatenating three channels; the result is now always
    zero-padded to two digits. The Py2-era rstrip("L") is gone too.
    """
    clamped = min(max(int(decimalNumber), 0), 255)
    return format(clamped, '02x')
def zipdir(path, ziph):
    """Recursively add every file under *path* to *ziph* (an open ZipFile)."""
    for dirpath, _dirnames, filenames in os.walk(path):
        for filename in filenames:
            ziph.write(os.path.join(dirpath, filename))
def removeUnzippedDir(path):
    """Delete the generated 'Iwen Colours' folder under *path*, if present."""
    target = path + '/Iwen Colours'
    if os.path.isdir(target):
        shutil.rmtree(target)
def trimName(name):
    """Strip non-alphanumeric characters and lower-case the first letter.

    Used to turn a designer-supplied colour name into a valid identifier.

    Fix vs. original: a name containing no alphanumeric characters left
    trimmedName empty and trimmedName[0] raised IndexError; such names now
    return '' (callers' non-empty-name branch guards already handle '').
    """
    trimmedName = re.sub('[^A-Za-z0-9]+', '', name)
    if not trimmedName:
        return ''
    return trimmedName[0].lower() + trimmedName[1:]
| 50.510638
| 241
| 0.478587
| 1,596
| 14,244
| 4.261278
| 0.091479
| 0.021173
| 0.168211
| 0.221732
| 0.845611
| 0.837965
| 0.832378
| 0.81488
| 0.802529
| 0.802529
| 0
| 0.014417
| 0.332842
| 14,244
| 281
| 242
| 50.690391
| 0.701252
| 0.057709
| 0
| 0.71564
| 0
| 0.033175
| 0.287891
| 0.061627
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047393
| false
| 0.018957
| 0.023697
| 0.004739
| 0.094787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
22cd0767375ae61b6ee76d6157f8d5df23392d86
| 11,036
|
py
|
Python
|
rosetta/tests/test_streamer.py
|
rjweiss/rosetta
|
919c6fc2c54ce726121ab24f738c46cda0598a08
|
[
"BSD-3-Clause"
] | 1
|
2015-01-19T22:26:10.000Z
|
2015-01-19T22:26:10.000Z
|
rosetta/tests/test_streamer.py
|
rjweiss/rosetta
|
919c6fc2c54ce726121ab24f738c46cda0598a08
|
[
"BSD-3-Clause"
] | null | null | null |
rosetta/tests/test_streamer.py
|
rjweiss/rosetta
|
919c6fc2c54ce726121ab24f738c46cda0598a08
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import unittest
from StringIO import StringIO
from scipy import sparse
from rosetta import TokenizerBasic
from rosetta.text.streamers import TextFileStreamer, TextIterStreamer
from rosetta.text.streamers import MySQLStreamer, MongoStreamer
from rosetta.common import DocIDError, TokenError
class TestTextFileStreamer(unittest.TestCase):
    """Tests for TextFileStreamer over two small text files on disk.

    Fixes vs. original: setUp now creates the temp data directory when it
    does not exist (previously the open() calls failed on a fresh checkout),
    and the doc_id cache is read as a normal attribute rather than through
    ``stream.__dict__``.
    """

    def setUp(self):
        self.test_path = os.path.abspath('./rosetta/tests')
        self.testdata_path = os.path.join(self.test_path, 'temp')
        # Create the scratch directory if a fresh checkout lacks it.
        if not os.path.isdir(self.testdata_path):
            os.makedirs(self.testdata_path)
        # Create some temp files to work with.
        self.doc1 = os.path.join(self.testdata_path, 'doc1.txt')
        self.doc2 = os.path.join(self.testdata_path, 'doc2.txt')
        with open(self.doc1, 'w') as f:
            f.write('doomed to failure\n')
        with open(self.doc2, 'w') as f:
            f.write('set for success\n')
        self.tokenizer = TokenizerBasic()

    def test_info_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'],
                           ['set', 'success']]
        text_benchmark = ['doomed to failure\n', 'set for success\n']
        token_result = []
        text_result = []
        for each in stream.info_stream():
            token_result.append(each['tokens'])
            text_result.append(each['text'])
        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(text_benchmark, text_result)

    def test_token_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'],
                           ['set', 'success']]
        id_benchmark = ['doc1', 'doc2']
        token_result = []
        for each in stream.token_stream(cache_list=['doc_id']):
            token_result.append(each)
        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(id_benchmark, stream.doc_id_cache)

    def test_to_vw(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        result = StringIO()
        stream.to_vw(result)
        benchmark = " 1 doc1| failure:1 doomed:1\n 1 doc2| set:1 success:1\n"
        self.assertEqual(benchmark, result.getvalue())

    def test_to_scipyspare(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])
        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())

    def tearDown(self):
        os.remove(self.doc1)
        os.remove(self.doc2)
class TestTextIterStreamer(unittest.TestCase):
    """Tests for TextIterStreamer fed from an in-memory list of docs.

    Fix vs. original: a broken duplicate ``test_to_scipyspare`` (which
    referenced nonexistent self.doc1/self.doc2, made no assertion, and was
    silently shadowed by the later definition of the same name) is removed.
    """

    def setUp(self):
        self.text_iter = [{'text': 'doomed to failure', 'doc_id': 'a'},
                          {'text': 'set for success', 'doc_id': '1'}]
        self.tokenizer = TokenizerBasic()

    def test_info_stream(self):
        stream = TextIterStreamer(text_iter=self.text_iter,
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'],
                           ['set', 'success']]
        text_benchmark = ['doomed to failure', 'set for success']
        token_result = []
        text_result = []
        for each in stream.info_stream():
            token_result.append(each['tokens'])
            text_result.append(each['text'])
        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(text_benchmark, text_result)

    def test_token_stream(self):
        stream = TextIterStreamer(text_iter=self.text_iter,
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'],
                           ['set', 'success']]
        id_benchmark = ['a', '1']
        token_result = []
        for each in stream.token_stream(cache_list=['doc_id']):
            token_result.append(each)
        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(id_benchmark, stream.doc_id_cache)

    def test_to_vw(self):
        stream = TextIterStreamer(text_iter=self.text_iter,
                                  tokenizer=self.tokenizer)
        result = StringIO()
        stream.to_vw(result)
        benchmark = " 1 a| failure:1 doomed:1\n 1 1| set:1 success:1\n"
        self.assertEqual(benchmark, result.getvalue())

    def test_to_scipyspare(self):
        stream = TextIterStreamer(text_iter=self.text_iter,
                                  tokenizer=self.tokenizer)
        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])
        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())
class TestMySQLStreamer(unittest.TestCase):
    """Exercises MySQLStreamer with a mock cursor in place of a live DB."""

    def setUp(self):
        self.query_result = [{'text': 'doomed to failure', 'doc_id': 'a'},
                             {'text': 'set for success', 'doc_id': '1'}]

        class MockCursor(object):
            """Stand-in cursor that replays a fixed result set."""
            def __init__(self, my_iter):
                self.my_iter = my_iter

            def __iter__(self):
                for item in self.my_iter:
                    yield item

            def execute(self, query):
                return None

        self.mock_cursor = MockCursor(self.query_result)
        # Connection details are dummies; the mock cursor short-circuits them.
        self.db_setup = {'host': 'hostname',
                         'user': 'username',
                         'password': 'password',
                         'database': 'database',
                         'query': 'select something'}
        self.tokenizer = TokenizerBasic()

    def _make_stream(self):
        # Every test uses the same streamer wired to the mock cursor.
        stream = MySQLStreamer(self.db_setup,
                               tokenizer=self.tokenizer)
        stream.cursor = self.mock_cursor
        return stream

    def test_info_stream(self):
        stream = self._make_stream()
        expected_tokens = [['doomed', 'failure'],
                           ['set', 'success']]
        expected_texts = ['doomed to failure', 'set for success']
        tokens = []
        texts = []
        for info in stream.info_stream():
            tokens.append(info['tokens'])
            texts.append(info['text'])
        self.assertEqual(expected_tokens, tokens)
        self.assertEqual(expected_texts, texts)

    def test_token_stream(self):
        stream = self._make_stream()
        expected_tokens = [['doomed', 'failure'],
                           ['set', 'success']]
        expected_ids = ['a', '1']
        tokens = [each for each in stream.token_stream(cache_list=['doc_id'])]
        self.assertEqual(expected_tokens, tokens)
        self.assertEqual(expected_ids, stream.doc_id_cache)

    def test_to_vw(self):
        stream = self._make_stream()
        out = StringIO()
        stream.to_vw(out, cache_list=['doc_id'])
        self.assertEqual(" 1 a| failure:1 doomed:1\n 1 1| set:1 success:1\n",
                         out.getvalue())

    def test_to_scipyspare(self):
        stream = self._make_stream()
        result = stream.to_scipysparse()
        expected = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])
        self.assertTrue((result.toarray() == expected.toarray()).all())
class TestMongoStreamer(unittest.TestCase):
    """Exercises MongoStreamer with a mock cursor in place of a live DB."""

    def setUp(self):
        self.query_result = [{'text': 'doomed to failure', '_id': 'a'},
                             {'text': 'set for success', '_id': '1'}]

        class MockCursor(object):
            """Stand-in cursor whose find() replays a fixed result set."""
            def __init__(self, my_iter):
                self.my_iter = my_iter

            def find(self, query):
                for item in self.my_iter:
                    yield item

            def execute(self):
                pass

        self.mock_cursor = MockCursor(self.query_result)
        # Connection details are dummies; the mock cursor short-circuits them.
        # '_id' is translated to 'doc_id' so tests can cache by doc_id.
        self.db_setup = {'host': 'hostname',
                         'user': 'username',
                         'password': 'password',
                         'database': 'database',
                         'query': 'select something',
                         'text_key': 'text',
                         'translations': {'_id': 'doc_id'}}
        self.tokenizer = TokenizerBasic()

    def _make_stream(self):
        # Every test uses the same streamer wired to the mock cursor.
        stream = MongoStreamer(self.db_setup,
                               tokenizer=self.tokenizer)
        stream.cursor = self.mock_cursor
        return stream

    def test_info_stream(self):
        stream = self._make_stream()
        expected_tokens = [['doomed', 'failure'],
                           ['set', 'success']]
        expected_texts = ['doomed to failure', 'set for success']
        tokens = []
        texts = []
        for info in stream.info_stream():
            tokens.append(info['tokens'])
            texts.append(info['text'])
        self.assertEqual(expected_tokens, tokens)
        self.assertEqual(expected_texts, texts)

    def test_token_stream(self):
        stream = self._make_stream()
        expected_tokens = [['doomed', 'failure'],
                           ['set', 'success']]
        expected_ids = ['a', '1']
        tokens = [each for each in stream.token_stream(cache_list=['doc_id'])]
        self.assertEqual(expected_tokens, tokens)
        self.assertEqual(expected_ids, stream.doc_id_cache)

    def test_to_vw(self):
        stream = self._make_stream()
        out = StringIO()
        stream.to_vw(out, cache_list=['doc_id'])
        self.assertEqual(" 1 a| failure:1 doomed:1\n 1 1| set:1 success:1\n",
                         out.getvalue())

    def test_to_scipyspare(self):
        stream = self._make_stream()
        result = stream.to_scipysparse()
        expected = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])
        self.assertTrue((result.toarray() == expected.toarray()).all())
| 37.158249
| 77
| 0.578833
| 1,213
| 11,036
| 5.065128
| 0.091509
| 0.042969
| 0.039388
| 0.035156
| 0.893392
| 0.880371
| 0.853678
| 0.853678
| 0.853678
| 0.82959
| 0
| 0.01231
| 0.300743
| 11,036
| 296
| 78
| 37.283784
| 0.783854
| 0.003171
| 0
| 0.837607
| 0
| 0.017094
| 0.097472
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 1
| 0.119658
| false
| 0.012821
| 0.034188
| 0.004274
| 0.183761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22efcb89ff2f1d3bcd8e60df6a939a8513551b4f
| 68,603
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/backup_results_unknownr/EightThreads_h264ref/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/backup_results_unknownr/EightThreads_h264ref/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/backup_results_unknownr/EightThreads_h264ref/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.000107695,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202773,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.00070841,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.619807,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.07328,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.615558,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.30865,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.612546,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.09997,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000133834,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0224685,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.162511,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.166168,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.162645,
'Execution Unit/Register Files/Runtime Dynamic': 0.188637,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.392723,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.20326,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 4.3087,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00285011,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00285011,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00247776,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000956619,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00238702,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.010565,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0274939,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.159742,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.472511,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.542555,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.21287,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0313857,
'L2/Runtime Dynamic': 0.00792346,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.8313,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.69691,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.180985,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.180985,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.68942,
'Load Store Unit/Runtime Dynamic': 3.77045,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.446278,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.892556,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.158385,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.158803,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0776214,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.832312,
'Memory Management Unit/Runtime Dynamic': 0.236424,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 28.1835,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000467569,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0316991,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.333958,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.366125,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 9.90249,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 1.41703e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.2027,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 8.09612e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.406489,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.655651,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.330951,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.39309,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.464892,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.73305,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.52953e-05,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.01705,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.123298,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.126095,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.123313,
'Execution Unit/Register Files/Runtime Dynamic': 0.143145,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.259757,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.839221,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.98353,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00160622,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00160622,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00141491,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000556425,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00181136,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00643871,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0148325,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.121218,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.302917,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.411712,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.857118,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0205201,
'L2/Runtime Dynamic': 0.00867599,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.95894,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.27982,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.152762,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.152762,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 6.68032,
'Load Store Unit/Runtime Dynamic': 3.18595,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.376685,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.75337,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.133687,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.13398,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.049702,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.785754,
'Memory Management Unit/Runtime Dynamic': 0.183682,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 24.7731,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 3.97702e-05,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0183401,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.215443,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.233823,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 7.45279,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 3.30641e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202715,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000212523,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.372101,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.600184,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.302953,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.27524,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.425544,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.65621,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 4.01502e-05,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0156076,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.112874,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.115428,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.112914,
'Execution Unit/Register Files/Runtime Dynamic': 0.131035,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.237802,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.756814,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.77118,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00164868,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00164868,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00145609,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000574661,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00165813,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00641157,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0150898,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.110963,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.291947,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.376882,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.801293,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0194717,
'L2/Runtime Dynamic': 0.00707992,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.42811,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.02257,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.135588,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.135588,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 6.06839,
'Load Store Unit/Runtime Dynamic': 2.82684,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.334338,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.668676,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.118658,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.118928,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0479251,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.759937,
'Memory Management Unit/Runtime Dynamic': 0.166853,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 24.0574,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000105581,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0167894,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.196819,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.213714,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 6.78696,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 5.95155e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202736,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000407336,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.339854,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.548172,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.276699,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.16472,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.388632,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.58425,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 7.69545e-05,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.014255,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.1031,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.105424,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.103177,
'Execution Unit/Register Files/Runtime Dynamic': 0.119679,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.217219,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.676539,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.56906,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00172945,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00172945,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00153113,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000606278,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00151443,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00650446,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0156965,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.101347,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.281529,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.344221,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.749298,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0183368,
'L2/Runtime Dynamic': 0.00554616,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.90577,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.76943,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.11869,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.11869,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.46625,
'Load Store Unit/Runtime Dynamic': 2.47345,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.292668,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.585336,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.103869,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.104113,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0462454,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.734532,
'Memory Management Unit/Runtime Dynamic': 0.150358,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.3568,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000202958,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0153357,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.179258,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.194797,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 6.14251,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.549791922570631,
'Runtime Dynamic': 1.549791922570631,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.120956,
'Runtime Dynamic': 0.0743872,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 100.492,
'Peak Power': 133.604,
'Runtime Dynamic': 30.3591,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 100.371,
'Total Cores/Runtime Dynamic': 30.2847,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.120956,
'Total L3s/Runtime Dynamic': 0.0743872,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.057987
| 124
| 0.681938
| 8,090
| 68,603
| 5.776885
| 0.066378
| 0.12359
| 0.112977
| 0.093463
| 0.941243
| 0.933219
| 0.922221
| 0.89586
| 0.872986
| 0.853814
| 0
| 0.131549
| 0.224349
| 68,603
| 914
| 125
| 75.057987
| 0.74673
| 0
| 0
| 0.654267
| 0
| 0
| 0.657469
| 0.048102
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3e0a05c2aa5412ea5205a77b2641badae72061e
| 123
|
py
|
Python
|
PManager/classes/sniffer/__init__.py
|
srisankethu/opengift.io
|
fc490332bd0252610b55a68c1fff1c4f704fcbd4
|
[
"Apache-2.0"
] | 1
|
2020-08-30T23:12:08.000Z
|
2020-08-30T23:12:08.000Z
|
PManager/classes/sniffer/__init__.py
|
lenarhus/opengift.io
|
db37494eac141e795c8d9d5b262d54cd6f20fb15
|
[
"Apache-2.0"
] | null | null | null |
PManager/classes/sniffer/__init__.py
|
lenarhus/opengift.io
|
db37494eac141e795c8d9d5b262d54cd6f20fb15
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'Tonakai'
from PManager.classes.sniffer.js_sniffer import *
from PManager.classes.sniffer.php_sniffer import *
| 41
| 50
| 0.829268
| 16
| 123
| 6
| 0.5625
| 0.25
| 0.395833
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081301
| 123
| 3
| 50
| 41
| 0.849558
| 0
| 0
| 0
| 0
| 0
| 0.056452
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43021a4f4001525af97e2dd810ed30bf386893d4
| 17,702
|
py
|
Python
|
kolibril_projects/physics/balmer/balmer_series.py
|
kolibril13/manim
|
6a1e03ce79e5abb8bafbf11a7345b51aa7b0d333
|
[
"MIT"
] | 5
|
2019-02-22T14:10:08.000Z
|
2022-03-13T01:03:49.000Z
|
kolibril_projects/physics/balmer/balmer_series.py
|
kolibril13/manim
|
6a1e03ce79e5abb8bafbf11a7345b51aa7b0d333
|
[
"MIT"
] | 1
|
2021-04-15T08:02:43.000Z
|
2021-04-15T09:46:42.000Z
|
kolibril_projects/physics/balmer/balmer_series.py
|
kolibril13/manim
|
6a1e03ce79e5abb8bafbf11a7345b51aa7b0d333
|
[
"MIT"
] | 1
|
2019-03-22T14:33:25.000Z
|
2019-03-22T14:33:25.000Z
|
from manim import *
class Balmer(Scene):
# Todo: remove this top, bottom, etc. stuff from the top
def construct(self):
TOP = config["frame_height"] / 2 * UP
BOTTOM = config["frame_height"] / 2 * DOWN
LEFT_SIDE = config["frame_width"] / 2 * LEFT
RIGHT_SIDE = config["frame_width"] / 2 * RIGHT
normal_red= "#FF1900"
def balmer(n):
Z=1
a_0=1
scale_fac= 0.13
return scale_fac* n**2/Z*a_0
trans_colmap = {
"t32" : "#ff0000",
"t42" :"#00efff",
"t52" :"#2800ff",
"t62" :"#7e00db",
"t72" :"#8100a9"
}
trans_wavelengths = {
"t32" : r" \text{Transition 3} \rightarrow \text{2}: \lambda = 656\,\text{nm}" ,
"t42" : r" \text{Transition 4} \rightarrow \text{2}: \lambda = 486\,\text{nm}" ,
"t52" : r" \text{Transition 5} \rightarrow \text{2}: \lambda = 434\,\text{nm}" ,
"t62" : r" \text{Transition 6} \rightarrow \text{2}: \lambda = 410\,\text{nm}" ,
"t72" : r" \text{Transition 7} \rightarrow \text{2}: \lambda = 397\,\text{nm}" ,
}
trans_names = {
"t32" : r"\alpha",
"t42" : r"\beta",
"t52" : r"\gamma",
"t62" : r"\delta",
"t72" : r"\epsilon"
}
levels= {"n1":balmer(1),
"n2":balmer(2),
"n3":balmer(3),
"n4":balmer(4),
"n5":balmer(5),
"n6":balmer(6),
"n7":balmer(7),
}
pos0=2.5
offset=0.5
core = Dot().set_color(normal_red).scale(1).move_to(BOTTOM + 2 * SMALL_BUFF * UP)
tplus = MathTex(r"\textbf{+}").move_to(core.get_center()).scale(0.2).set_color(BLACK).set_stroke(width=1.5)
core = VGroup(core, tplus)
arcs = [Arc(0.9*np.pi,- 2*np.pi, radius = lev, arc_center = core.get_center()) for lev in levels.values()]
annot = [MathTex(f"n={n}").shift(UP*n).scale(0.31) for n in range(1,1+ len(levels))]
all_arcs = VGroup(*arcs).set_color(BLUE)
all_annot = VGroup(*annot).set_color(interpolate_color(BLUE, BLACK, 0.6))
[x.move_to(core.get_center()+y*UP+UP*SMALL_BUFF) for x,y in zip(all_annot.submobjects, levels.values())]
self.play(FadeIn(core))
self.play(FadeIn(all_arcs, core.get_center()),lag_ratio= 0.9 , run_time=1)
self.play(FadeIn(all_annot),lag_ratio= 0.9 , run_time=2)
self.wait(0.5)
## setup ready
#first photon
position_shift = pos0
trans_id = "t32"
levup = "n3"
levdown= "n2"
el1 = Dot().set_color(YELLOW).scale(2)
el1.move_to(core.get_center()+UP*levels[levup])
el1.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el1.get_center())
elektron1 = VGroup(el1,t)
el2 = Dot().set_color(YELLOW).scale(2)
el2.move_to(core.get_center()+UP*levels[levdown])
el2.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el2.get_center())
elektron2 = VGroup(el2,t)
veci = Line(el1,el2).get_vector()
veci=veci/get_norm(veci)
print(veci)
CanvasNameObject= MathTex(trans_names[trans_id]).scale(1.3).move_to(el1.get_center()-veci*0.3).set_color(trans_colmap[trans_id])
path = VMobject().set_color(trans_colmap[trans_id])
path.set_points_as_corners([el1.get_center(),el1.get_center()+UP*0.01])
def update_path(path):
previus_path = path.copy()
previus_path.add_points_as_corners([el1.get_center()])
path.become(previus_path)
path.add_updater(update_path)
pathSolid = Line(el1,el2).set_color(trans_colmap[trans_id])
new_intepolated_color= interpolate_color(WHITE,trans_colmap[trans_id], 0.4)
dotphoton= Dot().scale(4).set_color(new_intepolated_color)
arc= Arc(-TAU/2, 3*TAU/2, radius=dotphoton.get_width()/2 , arc_center= dotphoton.get_center())
arc.set_color(trans_colmap[trans_id])
x = np.linspace(-PI,PI,100)
y = 2*np.sin(2*x)
sin_curve = VMobject()
sin_curve.set_points_smoothly([[xi,yi,0]
for xi, yi in zip(x,y) ] )
sin_curve.scale(0.06).set_color(trans_colmap[trans_id])
photon1 = VGroup(dotphoton,sin_curve,arc).move_to(Line(el1,el2).get_center()).shift(2*RIGHT)
i1 = MathTex(trans_wavelengths[trans_id], color= BLACK).scale(0.6)
i1.to_corner(DR,buff=SMALL_BUFF).shift(1.5*LEFT)
i1.background_rectangle = BackgroundRectangle(i1, color=WHITE, opacity= 0.2)
self.add(path)
self.play(FadeIn(elektron1))
self.play(FadeIn(CanvasNameObject),run_time=0.4)
self.play( elektron1.move_to,elektron2 , run_time= 0.5, rate_func= linear) #####
self.add(i1.background_rectangle,i1)
self.play(Transform(pathSolid, sin_curve), rate_func= linear, run_time=1 )
self.add(photon1)
self.play(VGroup(pathSolid,photon1).shift ,3*RIGHT, rate_func= linear, run_time= 1.5)
self.play(FadeOutAndShift(VGroup(pathSolid,photon1) ,RIGHT, rate_func= linear, run_time= 0.5))
self.play(FadeOut(elektron1),run_time=0.5)
self.remove(i1.background_rectangle,i1)
self.wait(0.5)
path.remove_updater(update_path)
#second photon
position_shift = pos0-offset
trans_id = "t42"
levup = "n4"
levdown= "n2"
el1 = Dot().set_color(YELLOW).scale(2)
el1.move_to(core.get_center()+UP*levels[levup])
el1.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el1.get_center())
elektron1 = VGroup(el1,t)
el2 = Dot().set_color(YELLOW).scale(2)
el2.move_to(core.get_center()+UP*levels[levdown])
el2.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el2.get_center())
elektron2 = VGroup(el2,t)
veci = Line(el1,el2).get_vector()
veci=veci/get_norm(veci)
print(veci)
CanvasNameObject= MathTex(trans_names[trans_id]).scale(1.3).move_to(el1.get_center()-veci*0.3).set_color(trans_colmap[trans_id])
path = VMobject().set_color(trans_colmap[trans_id])
path.set_points_as_corners([el1.get_center(),el1.get_center()+UP*0.01])
def update_path(path):
previus_path = path.copy()
previus_path.add_points_as_corners([el1.get_center()])
path.become(previus_path)
path.add_updater(update_path)
pathSolid = Line(el1,el2).set_color(trans_colmap[trans_id])
new_intepolated_color= interpolate_color(WHITE,trans_colmap[trans_id], 0.4)
dotphoton= Dot().scale(4).set_color(new_intepolated_color)
arc= Arc(-TAU/2, 3*TAU/2, radius=dotphoton.get_width()/2 , arc_center= dotphoton.get_center())
arc.set_color(trans_colmap[trans_id])
x = np.linspace(-PI,PI,100)
y = 2*np.sin(2*x)
sin_curve = VMobject()
sin_curve.set_points_smoothly([[xi,yi,0]
for xi, yi in zip(x,y) ] )
sin_curve.scale(0.06).set_color(trans_colmap[trans_id])
photon1 = VGroup(dotphoton,sin_curve,arc).move_to(Line(el1,el2).get_center()).shift(2*RIGHT)
i1 = MathTex(trans_wavelengths[trans_id], color= BLACK).scale(0.6)
i1.to_corner(DR,buff=SMALL_BUFF).shift(1.5*LEFT)
i1.background_rectangle = BackgroundRectangle(i1, color=WHITE, opacity= 0.2)
self.add(path)
self.play(FadeIn(elektron1))
self.play(FadeIn(CanvasNameObject),run_time=0.4)
self.play( elektron1.move_to,elektron2 , run_time= 0.5, rate_func= linear) #####
self.add(i1.background_rectangle,i1)
self.play(Transform(pathSolid, sin_curve), rate_func= linear, run_time=1 )
self.add(photon1)
self.play(VGroup(pathSolid,photon1).shift ,3*RIGHT, rate_func= linear, run_time= 1.5)
self.play(FadeOutAndShift(VGroup(pathSolid,photon1) ,RIGHT, rate_func= linear, run_time= 0.5))
self.play(FadeOut(elektron1),run_time=0.5)
self.remove(i1.background_rectangle,i1)
self.wait(0.5)
path.remove_updater(update_path)
# --- Third emission: electron drops from level n5 to n2 (transition "t52") ---
# Each of these sections repeats the same animation recipe for a different
# spectral line; only trans_id, the two levels and the angular slot change.
position_shift = pos0-2*offset
trans_id = "t52"
levup = "n5"
levdown= "n2"
# Electron dot placed on the upper level, rotated into its angular slot.
el1 = Dot().set_color(YELLOW).scale(2)
el1.move_to(core.get_center()+UP*levels[levup])
el1.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el1.get_center())
elektron1 = VGroup(el1,t)
# Target dot on the lower level, same angular slot.
el2 = Dot().set_color(YELLOW).scale(2)
el2.move_to(core.get_center()+UP*levels[levdown])
el2.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el2.get_center())
elektron2 = VGroup(el2,t)
# Unit vector from the upper to the lower electron; offsets the name label.
veci = Line(el1,el2).get_vector()
veci=veci/get_norm(veci)
print(veci)  # debug output left in by the author
CanvasNameObject= MathTex(trans_names[trans_id]).scale(1.3).move_to(el1.get_center()-veci*0.3).set_color(trans_colmap[trans_id])
# Trace path that grows behind el1 while it moves (see update_path below).
path = VMobject().set_color(trans_colmap[trans_id])
path.set_points_as_corners([el1.get_center(),el1.get_center()+UP*0.01])
def update_path(path):
    # Append el1's current position so the trace follows the electron.
    previus_path = path.copy()
    previus_path.add_points_as_corners([el1.get_center()])
    path.become(previus_path)
path.add_updater(update_path)
pathSolid = Line(el1,el2).set_color(trans_colmap[trans_id])
# Photon glyph: tinted dot + small sine squiggle + surrounding arc,
# all in this transition's colour.
new_intepolated_color= interpolate_color(WHITE,trans_colmap[trans_id], 0.4)
dotphoton= Dot().scale(4).set_color(new_intepolated_color)
arc= Arc(-TAU/2, 3*TAU/2, radius=dotphoton.get_width()/2 , arc_center= dotphoton.get_center())
arc.set_color(trans_colmap[trans_id])
x = np.linspace(-PI,PI,100)
y = 2*np.sin(2*x)
sin_curve = VMobject()
sin_curve.set_points_smoothly([[xi,yi,0]
                               for xi, yi in zip(x,y) ] )
sin_curve.scale(0.06).set_color(trans_colmap[trans_id])
photon1 = VGroup(dotphoton,sin_curve,arc).move_to(Line(el1,el2).get_center()).shift(2*RIGHT)
# Wavelength caption in the lower-right corner over a translucent box.
i1 = MathTex(trans_wavelengths[trans_id], color= BLACK).scale(0.6)
i1.to_corner(DR,buff=SMALL_BUFF).shift(1.5*LEFT)
i1.background_rectangle = BackgroundRectangle(i1, color=WHITE, opacity= 0.2)
# Play the sequence: drop the electron, morph the connecting line into the
# photon squiggle, fly the photon off to the right, then clean up.
self.add(path)
self.play(FadeIn(elektron1))
self.play(FadeIn(CanvasNameObject),run_time=0.4)
self.play( elektron1.move_to,elektron2 , run_time= 0.5, rate_func= linear) #####
self.add(i1.background_rectangle,i1)
self.play(Transform(pathSolid, sin_curve), rate_func= linear, run_time=1 )
self.add(photon1)
self.play(VGroup(pathSolid,photon1).shift ,3*RIGHT, rate_func= linear, run_time= 1.5)
self.play(FadeOutAndShift(VGroup(pathSolid,photon1) ,RIGHT, rate_func= linear, run_time= 0.5))
self.play(FadeOut(elektron1),run_time=0.5)
self.remove(i1.background_rectangle,i1)
self.wait(0.5)
path.remove_updater(update_path)
# --- Fourth emission: electron drops from level n6 to n2 (transition "t62").
# Identical recipe to the section above with a new level pair and slot.
position_shift = pos0-3*offset
trans_id = "t62"
levup = "n6"
levdown= "n2"
# Upper-level electron in its angular slot.
el1 = Dot().set_color(YELLOW).scale(2)
el1.move_to(core.get_center()+UP*levels[levup])
el1.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el1.get_center())
elektron1 = VGroup(el1,t)
# Lower-level target position.
el2 = Dot().set_color(YELLOW).scale(2)
el2.move_to(core.get_center()+UP*levels[levdown])
el2.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el2.get_center())
elektron2 = VGroup(el2,t)
# Unit drop direction; offsets the transition-name label.
veci = Line(el1,el2).get_vector()
veci=veci/get_norm(veci)
print(veci)  # debug output left in by the author
CanvasNameObject= MathTex(trans_names[trans_id]).scale(1.3).move_to(el1.get_center()-veci*0.3).set_color(trans_colmap[trans_id])
# Trace path that follows el1 via the updater.
path = VMobject().set_color(trans_colmap[trans_id])
path.set_points_as_corners([el1.get_center(),el1.get_center()+UP*0.01])
def update_path(path):
    # Extend the trace with el1's current position each frame.
    previus_path = path.copy()
    previus_path.add_points_as_corners([el1.get_center()])
    path.become(previus_path)
path.add_updater(update_path)
pathSolid = Line(el1,el2).set_color(trans_colmap[trans_id])
# Photon glyph (dot + sine squiggle + arc) in this transition's colour.
new_intepolated_color= interpolate_color(WHITE,trans_colmap[trans_id], 0.4)
dotphoton= Dot().scale(4).set_color(new_intepolated_color)
arc= Arc(-TAU/2, 3*TAU/2, radius=dotphoton.get_width()/2 , arc_center= dotphoton.get_center())
arc.set_color(trans_colmap[trans_id])
x = np.linspace(-PI,PI,100)
y = 2*np.sin(2*x)
sin_curve = VMobject()
sin_curve.set_points_smoothly([[xi,yi,0]
                               for xi, yi in zip(x,y) ] )
sin_curve.scale(0.06).set_color(trans_colmap[trans_id])
photon1 = VGroup(dotphoton,sin_curve,arc).move_to(Line(el1,el2).get_center()).shift(2*RIGHT)
# Wavelength caption, bottom-right, over a translucent rectangle.
i1 = MathTex(trans_wavelengths[trans_id], color= BLACK).scale(0.6)
i1.to_corner(DR,buff=SMALL_BUFF).shift(1.5*LEFT)
i1.background_rectangle = BackgroundRectangle(i1, color=WHITE, opacity= 0.2)
# Animate: drop, morph line into photon, fly photon out, clean up.
self.add(path)
self.play(FadeIn(elektron1))
self.play(FadeIn(CanvasNameObject),run_time=0.4)
self.play( elektron1.move_to,elektron2 , run_time= 0.5, rate_func= linear) #####
self.add(i1.background_rectangle,i1)
self.play(Transform(pathSolid, sin_curve), rate_func= linear, run_time=1 )
self.add(photon1)
self.play(VGroup(pathSolid,photon1).shift ,3*RIGHT, rate_func= linear, run_time= 1.5)
self.play(FadeOutAndShift(VGroup(pathSolid,photon1) ,RIGHT, rate_func= linear, run_time= 0.5))
self.play(FadeOut(elektron1),run_time=0.5)
self.remove(i1.background_rectangle,i1)
self.wait(0.5)
path.remove_updater(update_path)
# --- Fifth emission: electron drops from level n7 to n2 (transition "t72").
# Same recipe again with the last level pair and angular slot.
position_shift = pos0-4*offset
trans_id = "t72"
levup = "n7"
levdown= "n2"
# Upper-level electron in its angular slot.
el1 = Dot().set_color(YELLOW).scale(2)
el1.move_to(core.get_center()+UP*levels[levup])
el1.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el1.get_center())
elektron1 = VGroup(el1,t)
# Lower-level target position.
el2 = Dot().set_color(YELLOW).scale(2)
el2.move_to(core.get_center()+UP*levels[levdown])
el2.rotate(position_shift*PI/10 ,about_point= core.get_center())
t=MathTex("-").set_color(BLACK).move_to(el2.get_center())
elektron2 = VGroup(el2,t)
# Unit drop direction; offsets the transition-name label.
veci = Line(el1,el2).get_vector()
veci=veci/get_norm(veci)
print(veci)  # debug output left in by the author
CanvasNameObject= MathTex(trans_names[trans_id]).scale(1.3).move_to(el1.get_center()-veci*0.3).set_color(trans_colmap[trans_id])
# Trace path that follows el1 via the updater.
path = VMobject().set_color(trans_colmap[trans_id])
path.set_points_as_corners([el1.get_center(),el1.get_center()+UP*0.01])
def update_path(path):
    # Extend the trace with el1's current position each frame.
    previus_path = path.copy()
    previus_path.add_points_as_corners([el1.get_center()])
    path.become(previus_path)
path.add_updater(update_path)
pathSolid = Line(el1,el2).set_color(trans_colmap[trans_id])
# Photon glyph (dot + sine squiggle + arc) in this transition's colour.
new_intepolated_color= interpolate_color(WHITE,trans_colmap[trans_id], 0.4)
dotphoton= Dot().scale(4).set_color(new_intepolated_color)
arc= Arc(-TAU/2, 3*TAU/2, radius=dotphoton.get_width()/2 , arc_center= dotphoton.get_center())
arc.set_color(trans_colmap[trans_id])
x = np.linspace(-PI,PI,100)
y = 2*np.sin(2*x)
sin_curve = VMobject()
sin_curve.set_points_smoothly([[xi,yi,0]
                               for xi, yi in zip(x,y) ] )
sin_curve.scale(0.06).set_color(trans_colmap[trans_id])
photon1 = VGroup(dotphoton,sin_curve,arc).move_to(Line(el1,el2).get_center()).shift(2*RIGHT)
# Wavelength caption, bottom-right, over a translucent rectangle.
i1 = MathTex(trans_wavelengths[trans_id], color= BLACK).scale(0.6)
i1.to_corner(DR,buff=SMALL_BUFF).shift(1.5*LEFT)
i1.background_rectangle = BackgroundRectangle(i1, color=WHITE, opacity= 0.2)
# Animate: drop, morph line into photon, fly photon out, clean up.
self.add(path)
self.play(FadeIn(elektron1))
self.play(FadeIn(CanvasNameObject),run_time=0.4)
self.play( elektron1.move_to,elektron2 , run_time= 0.5, rate_func= linear) #####
self.add(i1.background_rectangle,i1)
self.play(Transform(pathSolid, sin_curve), rate_func= linear, run_time=1 )
self.add(photon1)
self.play(VGroup(pathSolid,photon1).shift ,3*RIGHT, rate_func= linear, run_time= 1.5)
self.play(FadeOutAndShift(VGroup(pathSolid,photon1) ,RIGHT, rate_func= linear, run_time= 0.5))
self.play(FadeOut(elektron1),run_time=0.5)
self.remove(i1.background_rectangle,i1)
self.wait(0.5)
path.remove_updater(update_path)
import os
import sys
import subprocess
from pathlib import Path

if __name__ == "__main__":
    # Render this scene file with manim, using the project-wide config file.
    # NOTE(review): sys.path[1] is assumed to point inside the project so
    # that its parent is the project root — fragile; confirm before relying
    # on it in other entry points.
    project_path = Path(sys.path[1]).parent
    script_name = f"{Path(__file__).resolve()}"
    # Use subprocess.run with an argument list instead of os.system with an
    # interpolated shell string: paths containing spaces or quotes can no
    # longer break the command or inject shell syntax.
    subprocess.run(
        [
            "manim",
            "--custom_folders",
            "--disable_caching",
            "-p",
            "-c", "WHITE",
            "--config_file", f"{project_path}/manim_settings.cfg",
            script_name,
        ],
        check=False,  # mirror os.system: do not raise on a non-zero exit code
    )
| 46.954907
| 141
| 0.624675
| 2,506
| 17,702
| 4.197127
| 0.086991
| 0.054763
| 0.045636
| 0.051341
| 0.848926
| 0.84132
| 0.838087
| 0.838087
| 0.838087
| 0.838087
| 0
| 0.043987
| 0.228166
| 17,702
| 377
| 141
| 46.954907
| 0.725829
| 0.005875
| 0
| 0.767584
| 0
| 0.018349
| 0.040763
| 0.003473
| 0
| 0
| 0
| 0.002653
| 0
| 1
| 0.021407
| false
| 0
| 0.009174
| 0
| 0.036697
| 0.015291
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43026b128d79a69a928dc2be56221d7369457a22
| 35,591
|
py
|
Python
|
sdk/digitaltwins/azure-digitaltwins-core/tests/test_relationships_async.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/digitaltwins/azure-digitaltwins-core/tests/test_relationships_async.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/digitaltwins/azure-digitaltwins-core/tests/test_relationships_async.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE.txt in the project root for
# license information.
# -------------------------------------------------------------------------
import pytest
import uuid
from devtools_testutils import AzureTestCase
from _preparer import DigitalTwinsRGPreparer, DigitalTwinsPreparer
from azure.digitaltwins.core.aio import DigitalTwinsClient
from azure.core import MatchConditions
from azure.core.exceptions import (
ResourceNotFoundError,
HttpResponseError,
ResourceExistsError,
ResourceModifiedError,
ResourceNotModifiedError
)
# DTDL model IDs (interface identifiers) shared by all relationship tests.
BUILDING_MODEL_ID = "dtmi:samples:RelationshipTestBuilding;1"
FLOOR_MODEL_ID = "dtmi:samples:RelationshipTestFloor;1"
ROOM_MODEL_ID = "dtmi:samples:RelationshipTestRoom;1"
# Digital-twin instance IDs created from the models above.
BUILDING_DIGITAL_TWIN = "DTRelationshipTestsBuildingTwin"
FLOOR_DIGITAL_TWIN = "DTRelationshipTestsFloorTwin"
ROOM_DIGITAL_TWIN = "DTRelationshipTestsRoomTwin"
class DigitalTwinsRelationshipTestsAsync(AzureTestCase):
def _get_client(self, endpoint, **kwargs):
    """Build an async DigitalTwinsClient for *endpoint* with test credentials."""
    cred = self.get_credential(DigitalTwinsClient, is_async=True)
    client = self.create_client_from_credential(
        DigitalTwinsClient, cred, endpoint=endpoint, **kwargs
    )
    return client
async def _clean_up_models(self, client, *models):
models = []
async for m in client.list_models():
models.append(m)
while models:
print("Cleaning up {} models".format(len(models)))
for model in models:
try:
await client.delete_model(model.id)
except:
pass
models = []
async for m in client.list_models():
models.append(m)
async def _clean_up_relationships(self, client):
    """Delete every relationship on the three well-known test twins."""
    twin_ids = (ROOM_DIGITAL_TWIN, FLOOR_DIGITAL_TWIN, BUILDING_DIGITAL_TWIN)
    for twin_id in twin_ids:
        async for rel in client.list_relationships(twin_id):
            rel_id = rel['$relationshipId']
            await client.delete_relationship(twin_id, rel_id)
async def _clean_up_twins(self, client):
    """Delete the three well-known test twins themselves."""
    for twin_id in (ROOM_DIGITAL_TWIN, FLOOR_DIGITAL_TWIN, BUILDING_DIGITAL_TWIN):
        await client.delete_digital_twin(twin_id)
async def _set_up_models(self, client, *delete_old):
    """Reset the service to a known baseline for the relationship tests.

    Deletes all models, relationships and the three well-known twins, then
    recreates the Building/Floor/Room DTDL models and one twin of each.

    :param client: async DigitalTwinsClient to operate on.
    :param delete_old: optional ``(twin_id, relationship_id)`` pair naming a
        leftover relationship to delete after setup; a missing one is ignored.
    """
    await self._clean_up_models(client)
    await self._clean_up_relationships(client)
    await self._clean_up_twins(client)
    # Building model: a "has" relationship to Floor carrying one property.
    dtdl_model_building = {
        "@id": BUILDING_MODEL_ID,
        "@type": "Interface",
        "@context": "dtmi:dtdl:context;2",
        "displayName": "Building",
        "contents": [
            {
                "@type": "Relationship",
                "name": "has",
                "target": FLOOR_MODEL_ID,
                "properties": [
                    {
                        "@type": "Property",
                        "name": "isAccessRestricted",
                        "schema": "boolean"
                    }
                ]
            },
            {
                "@type": "Property",
                "name": "AverageTemperature",
                "schema": "double"
            }
        ]
    }
    # Floor model: a plain "contains" relationship to Room.
    dtdl_model_floor = {
        "@id": FLOOR_MODEL_ID,
        "@type": "Interface",
        "@context": "dtmi:dtdl:context;2",
        "displayName": "Floor",
        "contents": [
            {
                "@type": "Relationship",
                "name": "contains",
                "target": ROOM_MODEL_ID
            },
            {
                "@type": "Property",
                "name": "AverageTemperature",
                "schema": "double"
            }
        ]
    }
    # Room model: leaf interface with two plain properties.
    dtdl_model_room = {
        "@id": ROOM_MODEL_ID,
        "@type": "Interface",
        "@context": "dtmi:dtdl:context;2",
        "displayName": "Room",
        "contents": [
            {
                "@type": "Property",
                "name": "Temperature",
                "schema": "double"
            },
            {
                "@type": "Property",
                "name": "IsOccupied",
                "schema": "boolean"
            }
        ]
    }
    await client.create_models([dtdl_model_building, dtdl_model_floor, dtdl_model_room])
    building_digital_twin = {
        "$metadata": {
            "$model": BUILDING_MODEL_ID
        },
        "AverageTemperature": 68,
    }
    await client.upsert_digital_twin(BUILDING_DIGITAL_TWIN, building_digital_twin)
    floor_digital_twin = {
        "$metadata": {
            "$model": FLOOR_MODEL_ID
        },
        "AverageTemperature": 75
    }
    await client.upsert_digital_twin(FLOOR_DIGITAL_TWIN, floor_digital_twin)
    room_digital_twin = {
        "$metadata": {
            "$model": ROOM_MODEL_ID
        },
        "Temperature": 80,
        "IsOccupied": True
    }
    await client.upsert_digital_twin(ROOM_DIGITAL_TWIN, room_digital_twin)
    if delete_old:
        try:
            # BUG FIX: the original omitted ``await`` here, so the coroutine
            # was created but never executed — the leftover relationship was
            # never deleted and the except clause below was dead code.
            await client.delete_relationship(*delete_old)
        except ResourceNotFoundError:
            pass
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_create_basic_relationship(self, resource_group, location, digitaltwin):
    """A well-formed relationship upserts successfully and receives an etag."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    payload = {
        "$relationshipId": "FloorContainsRoom",
        "$sourceId": FLOOR_DIGITAL_TWIN,
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    created = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN, "FloorContainsRoom", payload
    )
    assert created['$relationshipId'] == "FloorContainsRoom"
    assert created['$etag']
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_create_invalid_relationship(self, resource_group, location, digitaltwin):
    """Exercise upsert_relationship with bad source/target/body combinations."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    relationship = {
        "$relationshipId": "FloorContainsRoom",
        "$sourceId": FLOOR_DIGITAL_TWIN,
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    # Unknown source twin -> ResourceNotFoundError.
    with pytest.raises(ResourceNotFoundError):
        await client.upsert_relationship(
            "foo",
            "FloorContainsRoom",
            relationship)
    # The relationship id passed as argument wins over $relationshipId in the body.
    upserted = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN,
        "foo",
        relationship)
    assert upserted['$relationshipId'] == 'foo'
    relationship = {
        "$relationshipId": "FloorContainsRoom",
        "$sourceId": FLOOR_DIGITAL_TWIN,
        "$relationshipName": "contains",
        "$targetId": "foo"
    }
    # Unknown target twin is rejected by the service.
    with pytest.raises(HttpResponseError):
        await client.upsert_relationship(
            FLOOR_DIGITAL_TWIN,
            "FloorContainsRoom",
            relationship)
    relationship = {
        "$relationshipId": "FloorContainsRoom",
        "$sourceId": "foo",
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    # $sourceId in the body is ignored; the twin id argument is used instead.
    upserted = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN,
        "FloorContainsRoom",
        relationship)
    assert upserted['$sourceId'] == 'DTRelationshipTestsFloorTwin'
    relationship = {
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    # Minimal body: source and relationship ids are filled in from the call.
    upserted = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN,
        "FloorContainsRoom",
        relationship)
    assert upserted['$sourceId'] == 'DTRelationshipTestsFloorTwin'
    assert upserted['$relationshipId'] == 'FloorContainsRoom'
    relationship = {
        "$relationshipId": "foo",
        "$sourceId": FLOOR_DIGITAL_TWIN,
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    # Mismatched $relationshipId in the body is likewise overridden.
    upserted = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN,
        "FloorContainsRoom",
        relationship)
    assert upserted['$relationshipId'] == 'FloorContainsRoom'
    relationship = {
        "$targetId": ROOM_DIGITAL_TWIN
    }
    # Body without $relationshipName is rejected.
    with pytest.raises(HttpResponseError):
        await client.upsert_relationship(
            FLOOR_DIGITAL_TWIN,
            "FloorContainsRoom",
            relationship)
    # Empty body is rejected.
    with pytest.raises(HttpResponseError):
        await client.upsert_relationship(
            FLOOR_DIGITAL_TWIN,
            "FloorContainsRoom",
            {})
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_create_relationship_conditionally_if_missing(self, resource_group, location, digitaltwin):
    """With MatchConditions.IfMissing, upsert fails once the relationship exists."""
    client = self._get_client(digitaltwin.host_name)
    # Pass the twin/relationship pair so _set_up_models removes any leftover.
    await self._set_up_models(client, FLOOR_DIGITAL_TWIN, "FloorContainsRoom")
    relationship = {
        "$relationshipId": "FloorContainsRoom",
        "$sourceId": FLOOR_DIGITAL_TWIN,
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    created_relationship = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN,
        "FloorContainsRoom",
        relationship)
    assert created_relationship.get('$relationshipId') == "FloorContainsRoom"
    # A second conditional upsert must now fail: the resource already exists.
    with pytest.raises(ResourceExistsError):
        await client.upsert_relationship(
            FLOOR_DIGITAL_TWIN,
            "FloorContainsRoom",
            relationship,
            match_condition=MatchConditions.IfMissing)
@pytest.mark.skip("Conditional etag does not appear to be supported")
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_create_relationship_conditionally_if_modified(self, resource_group, location, digitaltwin):
    """IfModified + current etag -> not-modified error; stale etag -> upsert runs."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    relationship = {
        "$relationshipId": "FloorContainsRoom",
        "$sourceId": FLOOR_DIGITAL_TWIN,
        "$relationshipName": "contains",
        "$targetId": ROOM_DIGITAL_TWIN
    }
    created_relationship = await client.upsert_relationship(FLOOR_DIGITAL_TWIN, "FloorContainsRoom", relationship)
    assert created_relationship.get('$relationshipId') == "FloorContainsRoom"
    # Current etag means "unchanged" -> ResourceNotModifiedError.
    with pytest.raises(ResourceNotModifiedError):
        await client.upsert_relationship(
            FLOOR_DIGITAL_TWIN,
            "FloorContainsRoom",
            relationship,
            match_condition=MatchConditions.IfModified,
            etag=created_relationship.get('$etag'))
    # Arbitrary non-matching etag -> the upsert goes through.
    updated = await client.upsert_relationship(
        FLOOR_DIGITAL_TWIN,
        "FloorContainsRoom",
        relationship,
        match_condition=MatchConditions.IfModified,
        etag='W/"7e67a355-f19c-4c19-8a10-2d69b2d2253f"')
    assert updated['$relationshipId'] == "FloorContainsRoom"
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_upsert_relationship(self, resource_group, location, digitaltwin):
    """Upserting an existing relationship replaces its property values."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    created_relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        relationship)
    assert created_relationship['$relationshipId'] == "BuildingHasFloor"
    assert created_relationship['isAccessRestricted'] == False
    # Second upsert with a changed property overwrites the stored value.
    relationship["isAccessRestricted"] = True
    upserted_relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        relationship)
    assert upserted_relationship['$relationshipId'] == "BuildingHasFloor"
    assert upserted_relationship['isAccessRestricted'] == True
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_upsert_relationship_invalid_conditions(self, resource_group, location, digitaltwin):
    """Client-side validation: etag and match_condition must be paired correctly."""
    relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    client = self._get_client(digitaltwin.host_name)
    # etag supplied for a condition that does not take one.
    with pytest.raises(ValueError):
        await client.upsert_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            relationship,
            match_condition=MatchConditions.IfMissing,
            etag='etag-value')
    # Condition that requires an etag, supplied without one.
    with pytest.raises(ValueError):
        await client.upsert_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            relationship,
            match_condition=MatchConditions.IfModified)
    # NOTE(review): IfNotModified/IfPresent appear unsupported for upsert here.
    with pytest.raises(ValueError):
        await client.upsert_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            relationship,
            match_condition=MatchConditions.IfNotModified,
            etag='etag-value')
    with pytest.raises(ValueError):
        await client.upsert_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            relationship,
            match_condition=MatchConditions.IfPresent)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_get_relationship(self, resource_group, location, digitaltwin):
    """get_relationship returns exactly what upsert_relationship created."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    created_relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    relationship = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert created_relationship == relationship
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_get_relationship_not_existing(self, resource_group, location, digitaltwin):
    """Fetching an unknown relationship or an unknown twin raises a 404."""
    client = self._get_client(digitaltwin.host_name)
    missing = [
        (BUILDING_DIGITAL_TWIN, "BuildingHasRoof"),
        ("NotABuilding", "BuildingHasFloor"),
    ]
    for twin_id, rel_id in missing:
        with pytest.raises(ResourceNotFoundError):
            await client.get_relationship(twin_id, rel_id)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_delete_relationship(self, resource_group, location, digitaltwin):
    """delete_relationship returns None and the relationship is then gone."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    deleted = await client.delete_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert deleted is None
    with pytest.raises(ResourceNotFoundError):
        await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_delete_relationship_not_existing(self, resource_group, location, digitaltwin):
    """Deleting an unknown relationship or twin raises ResourceNotFoundError."""
    client = self._get_client(digitaltwin.host_name)
    for twin_id, rel_id in [
        (BUILDING_DIGITAL_TWIN, "BuildingHasRoof"),
        ("NotABuilding", "BuildingHasFloor"),
    ]:
        with pytest.raises(ResourceNotFoundError):
            await client.delete_relationship(twin_id, rel_id)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_delete_relationship_conditionally_if_not_modified(self, resource_group, location, digitaltwin):
    """IfNotModified delete fails on a stale etag, succeeds with the current one."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    # Non-matching etag -> precondition failed.
    with pytest.raises(ResourceModifiedError):
        await client.delete_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            match_condition=MatchConditions.IfNotModified,
            etag='W/"7e67a355-f19c-4c19-8a10-2d69b2d2253f"')
    # Current etag -> delete succeeds and returns None.
    deleted = await client.delete_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        match_condition=MatchConditions.IfNotModified,
        etag=relationship['$etag'])
    assert deleted is None
    with pytest.raises(ResourceNotFoundError):
        await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_delete_relationship_conditionally_if_present(self, resource_group, location, digitaltwin):
    """IfPresent delete succeeds when the relationship exists."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    deleted = await client.delete_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        match_condition=MatchConditions.IfPresent)
    assert deleted is None
    with pytest.raises(ResourceNotFoundError):
        await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_delete_relationship_invalid_conditions(self, resource_group, location, digitaltwin):
    """Client-side ValueError for etag/match_condition pairs delete rejects."""
    client = self._get_client(digitaltwin.host_name)
    # etag makes no sense with IfPresent.
    with pytest.raises(ValueError):
        await client.delete_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            match_condition=MatchConditions.IfPresent,
            etag='etag-value')
    # IfNotModified requires an etag.
    with pytest.raises(ValueError):
        await client.delete_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            match_condition=MatchConditions.IfNotModified)
    # NOTE(review): IfModified/IfMissing appear unsupported for delete here.
    with pytest.raises(ValueError):
        await client.delete_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            match_condition=MatchConditions.IfModified,
            etag='etag-value')
    with pytest.raises(ValueError):
        await client.delete_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            match_condition=MatchConditions.IfMissing)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_replace(self, resource_group, location, digitaltwin):
    """A JSON-patch 'replace' op updates a relationship property in place."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    assert relationship['isAccessRestricted'] == False
    patch = [
        {
            "op": "replace",
            "path": "/isAccessRestricted",
            "value": True
        }
    ]
    # update_relationship returns None; re-read to observe the change.
    update = await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    assert update is None
    updated = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert updated['isAccessRestricted'] == True
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_remove(self, resource_group, location, digitaltwin):
    """A JSON-patch 'remove' op deletes the property from the relationship."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    assert relationship['isAccessRestricted'] == False
    patch = [
        {
            "op": "remove",
            "path": "/isAccessRestricted",
        }
    ]
    # update_relationship returns None; re-read to confirm removal.
    update = await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    assert update is None
    updated = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert 'isAccessRestricted' not in updated
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_add(self, resource_group, location, digitaltwin):
    """A JSON-patch 'add' op sets a property value on the relationship."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    assert relationship['isAccessRestricted'] == False
    patch = [
        {
            "op": "add",
            "path": "/isAccessRestricted",
            "value": True
        }
    ]
    # update_relationship returns None; re-read to observe the change.
    update = await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    assert update is None
    updated = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert updated['isAccessRestricted'] == True
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_multiple(self, resource_group, location, digitaltwin):
    """Multiple ops in one patch are applied in order (replace then remove)."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    assert relationship['isAccessRestricted'] == False
    patch = [
        {
            "op": "replace",
            "path": "/isAccessRestricted",
            "value": True
        },
        {
            "op": "remove",
            "path": "/isAccessRestricted"
        }
    ]
    # The trailing 'remove' wins: the property is gone afterwards.
    update = await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    assert update is None
    updated = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert 'isAccessRestricted' not in updated
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_invalid_patch(self, resource_group, location, digitaltwin):
    """Malformed patch documents are rejected; an empty patch list is a no-op."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    # 'move' without a 'from' member is not a valid op -> rejected.
    patch = [
        {
            "op": "move",
            "path": "/isAccessRestricted"
        }
    ]
    with pytest.raises(HttpResponseError):
        await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    # Removing a property that does not exist -> rejected.
    patch = [
        {
            "op": "remove",
            "path": "/isAccessDoorRestricted"
        }
    ]
    with pytest.raises(HttpResponseError):
        await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    # A bare dict (not a JSON-patch list) -> rejected.
    patch = {
        "isAccessRestricted": True
    }
    with pytest.raises(HttpResponseError):
        await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    # A list containing an empty op -> rejected.
    patch = [{}]
    with pytest.raises(HttpResponseError):
        await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
    # An empty patch list is accepted as a no-op.
    patch = []
    await client.update_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor", patch)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_conditionally_if_not_modified(self, resource_group, location, digitaltwin):
    """IfNotModified patch fails on a stale etag, succeeds with the current one."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    relationship = await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    patch = [
        {
            "op": "replace",
            "path": "/isAccessRestricted",
            "value": True
        }
    ]
    # Non-matching etag -> precondition failed.
    with pytest.raises(ResourceModifiedError):
        await client.update_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            patch,
            match_condition=MatchConditions.IfNotModified,
            etag='W/"7e67a355-f19c-4c19-8a10-2d69b2d2253f"')
    # Current etag -> the patch is applied.
    await client.update_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        patch,
        match_condition=MatchConditions.IfNotModified,
        etag=relationship['$etag'])
    updated = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert updated['isAccessRestricted'] == True
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_conditionally_if_present(self, resource_group, location, digitaltwin):
    """IfPresent patch succeeds when the relationship exists."""
    client = self._get_client(digitaltwin.host_name)
    await self._set_up_models(client)
    new_relationship = {
        "$relationshipId": "BuildingHasFloor",
        "$sourceId": BUILDING_DIGITAL_TWIN,
        "$relationshipName": "has",
        "$targetId": FLOOR_DIGITAL_TWIN,
        "isAccessRestricted": False
    }
    await client.upsert_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        new_relationship)
    patch = [
        {
            "op": "replace",
            "path": "/isAccessRestricted",
            "value": True
        }
    ]
    await client.update_relationship(
        BUILDING_DIGITAL_TWIN,
        "BuildingHasFloor",
        patch,
        match_condition=MatchConditions.IfPresent)
    updated = await client.get_relationship(BUILDING_DIGITAL_TWIN, "BuildingHasFloor")
    assert updated['isAccessRestricted'] == True
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_invalid_conditions(self, resource_group, location, digitaltwin):
    """Client-side ValueError for etag/match_condition pairs update rejects."""
    client = self._get_client(digitaltwin.host_name)
    patch = [
        {
            "op": "replace",
            "path": "/isAccessRestricted",
            "value": True
        }
    ]
    # etag makes no sense with IfPresent.
    with pytest.raises(ValueError):
        await client.update_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            patch,
            match_condition=MatchConditions.IfPresent,
            etag='etag-value')
    # IfNotModified requires an etag.
    with pytest.raises(ValueError):
        await client.update_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            patch,
            match_condition=MatchConditions.IfNotModified)
    # NOTE(review): IfModified/IfMissing appear unsupported for update here.
    with pytest.raises(ValueError):
        await client.update_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            patch,
            match_condition=MatchConditions.IfModified,
            etag='etag-value')
    with pytest.raises(ValueError):
        await client.update_relationship(
            BUILDING_DIGITAL_TWIN,
            "BuildingHasFloor",
            patch,
            match_condition=MatchConditions.IfMissing)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_update_relationship_not_existing(self, resource_group, location, digitaltwin):
patch = [
{
"op": "replace",
"path": "/Property1",
"value": 42
}
]
client = self._get_client(digitaltwin.host_name)
with pytest.raises(ResourceNotFoundError):
await client.update_relationship(BUILDING_DIGITAL_TWIN, "foo", patch)
with pytest.raises(ResourceNotFoundError):
await client.update_relationship("foo", "BuildingHasFloor", patch)
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_list_relationships(self, resource_group, location, digitaltwin):
client = self._get_client(digitaltwin.host_name)
await self._set_up_models(client)
new_relationship = {
"$relationshipId": "BuildingHasFloor",
"$sourceId": BUILDING_DIGITAL_TWIN,
"$relationshipName": "has",
"$targetId": FLOOR_DIGITAL_TWIN,
"isAccessRestricted": False
}
relationship = await client.upsert_relationship(
BUILDING_DIGITAL_TWIN,
"BuildingHasFloor",
new_relationship)
all_relationships = []
async for r in client.list_relationships(BUILDING_DIGITAL_TWIN):
all_relationships.append(r)
assert relationship in all_relationships
assert all_relationships[0]['$relationshipId'] == "BuildingHasFloor"
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_list_relationship_by_id(self, resource_group, location, digitaltwin):
client = self._get_client(digitaltwin.host_name)
await self._set_up_models(client)
new_relationship = {
"$relationshipId": "BuildingHasFloor",
"$sourceId": BUILDING_DIGITAL_TWIN,
"$relationshipName": "has",
"$targetId": FLOOR_DIGITAL_TWIN,
"isAccessRestricted": False
}
relationship = await client.upsert_relationship(
BUILDING_DIGITAL_TWIN,
"BuildingHasFloor",
new_relationship)
all_relationships = []
async for r in client.list_relationships(BUILDING_DIGITAL_TWIN, relationship_id="BuildingHasFloor"):
all_relationships.append(r)
assert len(all_relationships) == 0
@DigitalTwinsRGPreparer(name_prefix="dttest")
@DigitalTwinsPreparer(name_prefix="dttest")
async def test_list_incoming_relationships(self, resource_group, location, digitaltwin):
client = self._get_client(digitaltwin.host_name)
await self._set_up_models(client)
new_relationship = {
"$relationshipId": "BuildingHasFloor",
"$sourceId": BUILDING_DIGITAL_TWIN,
"$relationshipName": "has",
"$targetId": FLOOR_DIGITAL_TWIN,
"isAccessRestricted": False
}
relationship = await client.upsert_relationship(
BUILDING_DIGITAL_TWIN,
"BuildingHasFloor",
new_relationship)
all_relationships = []
async for r in client.list_incoming_relationships(BUILDING_DIGITAL_TWIN):
all_relationships.append(r)
assert relationship not in all_relationships
| 39.810962
| 118
| 0.611615
| 2,844
| 35,591
| 7.366737
| 0.068214
| 0.074555
| 0.074364
| 0.08434
| 0.859195
| 0.8349
| 0.830605
| 0.824734
| 0.807455
| 0.801966
| 0
| 0.003317
| 0.296901
| 35,591
| 893
| 119
| 39.855543
| 0.833919
| 0.008373
| 0
| 0.712546
| 0
| 0
| 0.157849
| 0.011194
| 0
| 0
| 0
| 0
| 0.043849
| 1
| 0.001218
| false
| 0.002436
| 0.008526
| 0
| 0.01218
| 0.001218
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4310903e36cc4c190deb93a113cea0b2cb148350
| 76
|
py
|
Python
|
experimental_models/utils/__init__.py
|
justinbt1/Multimodal-Document-Classification
|
794eb1e1235efc9c81f1edca881db576d754628a
|
[
"MIT"
] | null | null | null |
experimental_models/utils/__init__.py
|
justinbt1/Multimodal-Document-Classification
|
794eb1e1235efc9c81f1edca881db576d754628a
|
[
"MIT"
] | null | null | null |
experimental_models/utils/__init__.py
|
justinbt1/Multimodal-Document-Classification
|
794eb1e1235efc9c81f1edca881db576d754628a
|
[
"MIT"
] | null | null | null |
from .data import *
from .models_images import *
from .models_text import *
| 19
| 28
| 0.763158
| 11
| 76
| 5.090909
| 0.545455
| 0.357143
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 76
| 3
| 29
| 25.333333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4314ec620840c4177444445838797fafbcb1a029
| 162
|
py
|
Python
|
Configuration/Generator/python/BsMM_EXTRAS_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Configuration/Generator/python/BsMM_EXTRAS_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Configuration/Generator/python/BsMM_EXTRAS_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
from Configuration.Generator.BsMM_cfi import *
from Configuration.Generator.BsMM_filt_cfi import *
ProductionFilterSequence = cms.Sequence(MuFilter+MuMuFilter)
| 27
| 60
| 0.851852
| 18
| 162
| 7.5
| 0.666667
| 0.251852
| 0.385185
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080247
| 162
| 5
| 61
| 32.4
| 0.90604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4a332cbaab2f4f972f703edda528f7037dd53bac
| 221,884
|
py
|
Python
|
src/genie/libs/parser/nxos/show_pim.py
|
pwoconnor/genieparser
|
6c764dea0f452d75bc02f3aff3dbe16aec543c81
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/nxos/show_pim.py
|
pwoconnor/genieparser
|
6c764dea0f452d75bc02f3aff3dbe16aec543c81
|
[
"Apache-2.0"
] | 1
|
2019-04-02T16:51:56.000Z
|
2019-04-02T16:51:56.000Z
|
src/genie/libs/parser/nxos/show_pim.py
|
pwoconnor/genieparser
|
6c764dea0f452d75bc02f3aff3dbe16aec543c81
|
[
"Apache-2.0"
] | 1
|
2021-01-29T17:31:33.000Z
|
2021-01-29T17:31:33.000Z
|
import re
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Optional
from genie.libs.parser.utils.common import Common
# ====================================================
# schema Parser for 'show ipv6 pim interface'
# ====================================================
class ShowIpv6PimInterfaceSchema(MetaParser):
"""Schema for show ipv6 pim interface"""
schema = {
'vrf':{
Any():{
'interfaces':{
Any():{
'address_family': {
Any(): {
Optional('oper_status'): str,
Optional('link_status'): str,
Optional('admin_status'): str,
Optional('address'): list,
Optional('dr_address'): str,
Optional('dr_priority'): int,
Optional('configured_dr_priority'): int,
Optional('neighbor_count'): int,
Optional('hello_interval'): int,
Optional('hello_expiration'): str,
Optional('neighbor_holdtime'): int,
Optional('dr_delay'): int,
Optional('bsr_border'): bool,
Optional('genid'): str,
Optional('hello_md5_ah_authentication'): str,
Optional('neighbor_filter'): str,
Optional('jp_inbound_policy'): str,
Optional('jp_outbound_policy'): str,
Optional('jp_interval'): int,
Optional('jp_next_sending'): int,
Optional('bfd'):{
Optional('enable'): bool,
},
Optional('sm'):{
Optional('passive'): bool,
},
Optional('vpc_svi'): bool,
Optional('auto_enabled'): bool,
Optional('statistics'):{
Optional('last_reset'): str,
Optional('general'):{
Optional('hellos'): str,
Optional('jps'): str,
Optional('asserts'): str,
Optional('grafts'): str,
Optional('graft_acks'): str,
Optional('df_offers'): str,
Optional('df_winners'): str,
Optional('df_backoffs'): str,
Optional('df_passes'): str,
},
Optional('errors'):{
Optional('checksum'): int,
Optional('invalid_packet_types'): int,
Optional('invalid_df_subtypes'): int,
Optional('authentication_failed'): int,
Optional('packet_length_errors'): int,
Optional('bad_version_packets'): int,
Optional('packets_from_self'): int,
Optional('packets_from_non_neighbors'): int,
Optional('packets_received_on_passiveinterface'): int,
Optional('jps_received_on_rpf_interface'): int,
Optional('joins_received_with_no_rp'): int,
Optional('joins_received_with_wrong_rp'): int,
Optional('joins_received_with_ssm_groups'): int,
Optional('joins_received_with_bidir_groups'): int,
Optional('jps_filtered_by_inbound_policy'): int,
Optional('jps_filtered_by_outbound_policy'): int,
},
},
},
},
},
},
},
},
}
# ==========================================================
# parser for show ipv6 pim interface vrf <word>
# parser for show ipv6 pim interface
# parser for show ipv6 pim interface <word>
# parser for show ipv6 pim interface <word1> vrf <word2>
#
# ==========================================================
class ShowIpv6PimInterface(ShowIpv6PimInterfaceSchema):
"""Parser for:
show ipv6 pim interface vrf <vrf>
show ipv6 pim interface
show ipv6 pim interface <interface>
show ipv6 pim interface <interface> vrf <vrf>"""
cli_command = ['show ipv6 pim interface {interface} vrf {vrf}', 'show ipv6 pim interface vrf {vrf}',
'show ipv6 pim interface {interface}','show ipv6 pim interface']
def cli(self , interface ="", vrf="",output=None):
if not vrf and not interface:
cmd = self.cli_command[3]
if not vrf and interface:
cmd = self.cli_command[2].format(interface=interface)
if vrf and not interface:
cmd = self.cli_command[1].format(vrf=vrf)
if vrf and interface:
cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
if output is None:
out = self.device.execute(cmd)
else:
out = output
af_name = 'ipv6'
# Init dictionary
parsed_dict = dict()
address_list = []
checksum = invalid_packet_types = invalid_df_subtypes = authentication_failed \
= packet_length_errors = bad_version_packets = packets_from_self =\
packets_from_non_neighbors = packets_received_on_passiveinterface = \
jps_received_on_rpf_interface = joins_received_with_bidir_groups = \
joins_received_with_no_rp = joins_received_with_ssm_groups = joins_received_with_wrong_rp = \
jps_filtered_by_inbound_policy = jps_filtered_by_outbound_policy = hellos = jps = asserts = grafts\
= graft_acks = df_backoffs = df_passes = df_winners = df_offers = ""
oper_status = link_status = admin_status = interface_name = dr_address = bsr_border \
= hello_md5_ah_authentication = \
hello_interval = hello_expiration = dr_priority = configured_dr_delay = jp_next_sending = bfd\
= jp_interval = passive = auto_enabled = genid = jp_outbound_policy = jp_inbound_policy = \
nbr_count = neighbor_holdtime = neighbor_filter = vpc_svi =last_rest = ""
for line in out.splitlines():
line = line.rstrip()
#PIM6 Interface Status for VRF "VRF1"
p1 = re.compile(r'^\s*PIM6 +Interface +Status +for +VRF+ \"(?P<vrf>[\w]+)\"$')
m = p1.match(line)
if m:
vrf = m.groupdict()['vrf']
checksum = invalid_packet_types = invalid_df_subtypes = authentication_failed \
= packet_length_errors = bad_version_packets = packets_from_self = \
packets_from_non_neighbors = packets_received_on_passiveinterface = \
jps_received_on_rpf_interface = joins_received_with_bidir_groups = \
joins_received_with_no_rp = joins_received_with_ssm_groups = joins_received_with_wrong_rp = \
jps_filtered_by_inbound_policy = jps_filtered_by_outbound_policy = hellos = jps = asserts = grafts \
= graft_acks = df_backoffs = df_passes = df_winners = df_offers = ""
oper_status = link_status = admin_status = interface_name = dr_address = bsr_border \
= hello_md5_ah_authentication = \
hello_interval = hello_expiration = dr_priority = configured_dr_delay = jp_next_sending = bfd \
= jp_interval = passive = auto_enabled = genid = jp_outbound_policy = jp_inbound_policy = \
nbr_count = configured_dr_priority = neighbor_holdtime = neighbor_filter = vpc_svi =last_rest = ""
# Ethernet2/2, Interface status: protocol-up/link-up/admin-up
p2 = re.compile(r'^\s*(?P<interface_name>[\w\/\.\-]+),'
' +Interface +status: +protocol\-(?P<oper_status>[\w]+)/'
'link\-(?P<link_status>[\w]+)/'
'admin\-(?P<admin_status>[\w]+)$')
m = p2.match(line)
if m:
interface_name = m.groupdict()['interface_name']
oper_status = m.groupdict()['oper_status']
link_status = m.groupdict()['link_status']
admin_status = m.groupdict()['admin_status']
address_list = []
# IPv6 address:
# 10.11.33.11, IP subnet: 10.11.33.0/24
p3 = re.compile(r'^\s*(?P<space>\s{4})'
'(?P<address>[^Error:][\w\/\:]+)( +\[VALID\])?$')
m = p3.match(line)
if m:
address_list.append(m.groupdict()['address'])
# PIM6 DR: fe80::5054:ff:fe89:740c, DR's priority: 1
p4 = re.compile(r'^\s*PIM6 +DR: +(?P<dr_address>[\w\:]+),'
' +DR\'s +priority: +(?P<dr_priority>[\d]+)$')
m = p4.match(line)
if m:
dr_address = m.groupdict()['dr_address']
dr_priority = m.groupdict()['dr_priority']
# PIM6 neighbor count: 1
p5 = re.compile(r'^\s*PIM6 +neighbor +count: +(?P<nbr_count>[\d]+)$')
m = p5.match(line)
if m:
nbr_count = m.groupdict()['nbr_count']
# PIM6 hello interval: 45 secs (configured 44444 ms), next hello sent in: 00:00:05
p6 = re.compile(r'^\s*PIM6 +hello +interval: +(?P<hello_interval>[\d]+) +secs'
'( +\(configured +(?P<configured_interval_ms>\d+) +ms\))?,'
' +next +hello +sent +in: +(?P<hello_expiration>[\w\:]+)$')
m = p6.match(line)
if m:
hello_interval = m.groupdict()['hello_interval']
hello_expiration = m.groupdict()['hello_expiration']
# PIM6 neighbor holdtime: 159 secs
p7 = re.compile(r'^\s*PIM6 +neighbor +holdtime: +(?P<holdtime>[\d]+) +secs$')
m = p7.match(line)
if m:
neighbor_holdtime = m.groupdict()['holdtime']
# PIM6 configured DR priority: 144
p8 = re.compile(r'^\s*PIM6 +configured +DR +priority: +(?P<configured_dr_priority>[\d]+)$')
m = p8.match(line)
if m:
configured_dr_priority = m.groupdict()['configured_dr_priority']
# PIM6 configured DR delay: 3 secs
p9 = re.compile(r'^\s*PIM6 +configured +DR +delay: +(?P<configured_dr_delay>[\d]+) +secs$')
m = p9.match(line)
if m:
configured_dr_delay = m.groupdict()['configured_dr_delay']
# PIM6 border interface: yes
p10 = re.compile(r'^\s*PIM6 +border +interface: +(?P<border_interface>[\w]+)$')
m = p10.match(line)
if m:
bsr_border = m.groupdict()['border_interface']
# PIM6 GenID sent in Hellos: 0x26fae674
p11 = re.compile(r'^\s*PIM6 +GenID +sent +in +Hellos: +(?P<genid>[\S]+)$')
m = p11.match(line)
if m:
genid = m.groupdict()['genid']
# PIM6 Hello MD5-AH Authentication: disabled
p12 = re.compile(r'^\s*PIM6 +Hello +MD5-AH +Authentication: +(?P<md5_authentication>[\w]+)$')
m = p12.match(line)
if m:
hello_md5_ah_authentication = m.groupdict()['md5_authentication']
# PIM6 Neighbor policy: v4neighbor-policy
p13 = re.compile(r'^\s*PIM6 +Neighbor +policy: +(?P<nbr_policy>(?!none +configured)[\w\-\s]+)$')
m = p13.match(line)
if m:
neighbor_filter = m.groupdict()['nbr_policy']
# PIM6 Join-Prune inbound policy: v4jp-policy
p14 = re.compile(r'^\s*PIM6 +Join-Prune +inbound +policy: +(?P<jp_inbound_policy>(?!none)[\w\-\s]+)$')
m = p14.match(line)
if m:
jp_inbound_policy = m.groupdict()['jp_inbound_policy']
# PIM6 Join-Prune outbound policy: v4jp-policy
p15 = re.compile(r'^\s*PIM6 +Join-Prune +outbound +policy: +(?P<jp_outbound_policy>(?!none)[\w\-\s]+)$')
m = p15.match(line)
if m:
jp_outbound_policy = m.groupdict()['jp_outbound_policy']
# PIM6 Join-Prune interval: 1 minutes
p16 = re.compile(r'^\s*PIM6 +Join-Prune +interval: +(?P<jp_interval>[\d]+) +minutes$')
m = p16.match(line)
if m:
jp_interval = m.groupdict()['jp_interval']
# PIM6 Join-Prune next sending: 1 minutes
p17 = re.compile(r'^\s*PIM6 +Join-Prune +next +sending: +(?P<jp_next_sending>[\d]+) +minutes$')
m = p17.match(line)
if m:
jp_next_sending = m.groupdict()['jp_next_sending']
# PIM6 BFD enabled: no
p18 = re.compile(r'^\s*PIM6 +BFD +enabled: +(?P<bfd_enabled>[\w]+)$')
m = p18.match(line)
if m:
bfd = m.groupdict()['bfd_enabled']
# PIM6 passive interface: no
p19 = re.compile(r'^\s*PIM(6)? +passive +interface: +(?P<passive>[\w]+)$')
m = p19.match(line)
if m:
passive = m.groupdict()['passive']
# PIM6 VPC SVI: no
p20 = re.compile(r'^\s*PIM6 +VPC +SVI: +(?P<vpc_svi>[\w]+)$')
m = p20.match(line)
if m:
vpc_svi = m.groupdict()['vpc_svi']
# PIM6 Auto Enabled: no
p21 = re.compile(r'^\s*PIM(6)? +Auto +Enabled: +(?P<auto_enabled>[\w]+)$')
m = p21.match(line)
if m:
auto_enabled = m.groupdict()['auto_enabled']
# PIM6 Interface Statistics, last reset: never
# PIM6 Interface Statistics
p22 = re.compile(r'^\s*PIM6 +Interface +Statistics+(, +last +reset: +(?P<last_reset>[\w\:]+))?$')
m = p22.match(line)
if m:
statistic = True
if m.groupdict()['last_reset']:
last_rest = m.groupdict()['last_reset']
# Hellos: 360/474 (early: 0), JPs: 0/0, Asserts: 0/0
p23 = re.compile(r'^\s*Hellos: +(?P<hellos>[\d\/]+)'
'( +\(early: +(?P<early>\d+)\))?,'
' +JPs: +(?P<jps>[\d\/]+),'
' +Asserts: +(?P<asserts>[\d\/]+)$')
m = p23.match(line)
if m:
hellos = m.groupdict()['hellos']
jps = m.groupdict()['jps']
asserts = m.groupdict()['asserts']
# Grafts: 0/0, Graft-Acks: 0/0
p24 = re.compile(r'^\s*Grafts: +(?P<grafts>[\d\/]+),'
' +Graft-Acks: +(?P<graft_acks>[\d\/]+)$')
m = p24.match(line)
if m:
grafts = m.groupdict()['grafts']
graft_acks = m.groupdict()['graft_acks']
# DF-Offers: 0/0, DF-Winners: 0/0, DF-Backoffs: 0/0, DF-Passes: 0/0
p25 = re.compile(r'^\s*DF-Offers: +(?P<df_offers>[\d\/]+),'
' +DF-Winners: +(?P<df_winners>[\d\/]+),'
' +DF-Backoffs: +(?P<df_backoffs>[\d\/]+),'
' +DF-Passes: +(?P<df_passes>[\d\/]+)$')
m = p25.match(line)
if m:
df_offers = m.groupdict()['df_offers']
df_winners = m.groupdict()['df_winners']
df_backoffs = m.groupdict()['df_backoffs']
df_passes = m.groupdict()['df_passes']
# Checksum errors: 0, Invalid packet types/DF subtypes: 0/0
p26 = re.compile(r'^\s*Checksum errors: +(?P<checksum>[\d]+),'
' +Invalid +packet +types\/DF +subtypes:'
' +(?P<invalid_packet_types>[\d]+)/(?P<invalid_df_subtypes>[\d]+)$')
m = p26.match(line)
if m:
checksum = m.groupdict()['checksum']
invalid_packet_types = m.groupdict()['invalid_packet_types']
invalid_df_subtypes = m.groupdict()['invalid_df_subtypes']
# Authentication failed: 0
p27 = re.compile(r'^\s*Authentication +failed: +(?P<authentication_failed>[\d]+)$')
m = p27.match(line)
if m:
authentication_failed = m.groupdict()['authentication_failed']
# Packet length errors: 0, Bad version packets: 0, Packets from self: 0
p28 = re.compile(r'^\s*Packet +length +errors: +(?P<packet_length_errors>[\d]+),'
' +Bad +version +packets: +(?P<bad_version_packets>[\d]+),'
' +Packets +from +self: +(?P<packets_from_self>[\d]+)$')
m = p28.match(line)
if m:
packet_length_errors = m.groupdict()['packet_length_errors']
bad_version_packets = m.groupdict()['bad_version_packets']
packets_from_self = m.groupdict()['packets_from_self']
# Packets from non-neighbors: 0
p29 = re.compile(r'^\s*Packets +from +non\-neighbors: +(?P<packets_from_non_neighbors>[\d]+)$')
m = p29.match(line)
if m:
packets_from_non_neighbors = m.groupdict()['packets_from_non_neighbors']
# Packets received on passiveinterface: 0
p30 = re.compile(r'^\s*Packets +received +on +passiveinterface:'
' +(?P<packets_received_on_passiveinterface>[\d]+)$')
m = p30.match(line)
if m:
packets_received_on_passiveinterface = m.groupdict()['packets_received_on_passiveinterface']
# JPs received on RPF-interface: 0
p31 = re.compile(r'^\s*JPs +received +on +RPF\-interface:'
' +(?P<jp_received_on_rpf_interface>[\d]+)$')
m = p31.match(line)
if m:
jps_received_on_rpf_interface = m.groupdict()['jp_received_on_rpf_interface']
# (*,G) Joins received with no/wrong RP: 0/0
p32 = re.compile(r'^\s*(?P<s_g>[\w\(\)\,\*]+) +Joins +received +with +no\/+wrong RP:'
' +(?P<joins_received_with_no_rp>\d+)/(?P<joins_received_with_wrong_rp>\d+)$')
m = p32.match(line)
if m:
joins_received_with_no_rp = m.groupdict()['joins_received_with_no_rp']
joins_received_with_wrong_rp = m.groupdict()['joins_received_with_wrong_rp']
# (*,G)/(S,G) JPs received for SSM/Bidir groups: 0/0
p33 = re.compile(r'^\s*(?P<s_g>[\w\(\)\,\*\/]+) +JPs +received +for +SSM\/Bidir +groups:'
' +(?P<joins_received_with_ssm_groups>\d+)/(?P<joins_received_with_bidir_groups>\d+)$')
m = p33.match(line)
if m:
joins_received_with_ssm_groups = m.groupdict()['joins_received_with_ssm_groups']
joins_received_with_bidir_groups = m.groupdict()['joins_received_with_bidir_groups']
# JPs filtered by inbound policy: 0
p34 = re.compile(r'^\s*JPs +filtered +by +inbound +policy:'
' +(?P<jps_filtered_by_inbound_policy>\d+)$')
m = p34.match(line)
if m:
jps_filtered_by_inbound_policy = m.groupdict()['jps_filtered_by_inbound_policy']
# JPs filtered by outbound policy: 0
p35 = re.compile(r'^\s*JPs +filtered +by +outbound +policy:'
' +(?P<jps_filtered_by_outbound_policy>\d+)$')
m = p35.match(line)
if m:
jps_filtered_by_outbound_policy = m.groupdict()['jps_filtered_by_outbound_policy']
if vrf and interface_name:
if 'vrf' not in parsed_dict:
parsed_dict['vrf'] = {}
if vrf not in parsed_dict['vrf']:
parsed_dict['vrf'][vrf] = {}
if 'interfaces' not in parsed_dict['vrf'][vrf]:
parsed_dict['vrf'][vrf]['interfaces'] = {}
if interface_name not in parsed_dict['vrf'][vrf]['interfaces']:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] = {}
if 'address_family' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name]:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'] = {}
if af_name not in parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family']:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] = {}
if oper_status:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name]\
['oper_status'] = oper_status
if link_status:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name]\
['link_status'] = link_status
if admin_status:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name]\
['admin_status'] = admin_status
if address_list:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['address'] = address_list
if dr_address:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['dr_address'] = dr_address
if dr_priority:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['dr_priority'] = int(dr_priority)
if configured_dr_priority:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['configured_dr_priority'] = int(configured_dr_priority)
if configured_dr_delay:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['dr_delay'] = int(configured_dr_delay)
if nbr_count:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['neighbor_count'] = int(nbr_count)
if hello_interval:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['hello_interval'] = int(hello_interval)
if hello_expiration:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['hello_expiration'] = hello_expiration
if neighbor_holdtime:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['neighbor_holdtime'] = int(neighbor_holdtime)
if dr_priority:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['dr_priority'] = int(dr_priority)
if bsr_border:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['bsr_border'] = True if bsr_border.lower() == 'yes' else False
if genid:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['genid'] = genid
if hello_md5_ah_authentication:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['hello_md5_ah_authentication'] = hello_md5_ah_authentication
if neighbor_filter:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['neighbor_filter'] = neighbor_filter
if jp_inbound_policy:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['jp_inbound_policy'] = jp_inbound_policy
if jp_outbound_policy:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['jp_outbound_policy'] = jp_outbound_policy
if jp_interval:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['jp_interval'] = int(jp_interval)
if jp_next_sending:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['jp_next_sending'] = int(jp_next_sending)
if bfd:
if 'bfd' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name]\
['address_family'][af_name]:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family']\
[af_name]['bfd'] = {}
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['bfd']['enable'] = True if bfd.lower() == 'yes' else False
if passive:
if 'sm' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name]\
['address_family'][af_name]:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family']\
[af_name]['sm'] = {}
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['sm']['passive'] = True if passive.lower() == 'yes' else False
if vpc_svi:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['vpc_svi'] = True if vpc_svi.lower() == 'yes' else False
if auto_enabled:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['auto_enabled'] = True if auto_enabled.lower() == 'yes' else False
if hellos or jps or asserts or grafts or graft_acks or df_backoffs or df_passes\
or df_winners or df_offers:
if 'statistics' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name]\
['address_family'][af_name]:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics'] = {}
if 'general' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics']:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics']['general'] = {}
if last_rest:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics']['last_reset'] = last_rest
if hellos:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name]\
['statistics']['general']['hellos'] = hellos
if jps:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['jps'] = jps
if asserts:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['asserts'] = asserts
if df_offers:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['df_offers'] = df_offers
if graft_acks:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['graft_acks'] = graft_acks
if grafts:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['grafts'] = grafts
if df_backoffs:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['df_backoffs'] = df_backoffs
if df_passes:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['df_passes'] = df_passes
if df_winners:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['general']['df_winners'] = df_winners
if checksum or invalid_packet_types or invalid_df_subtypes or authentication_failed\
or packet_length_errors or bad_version_packets or packets_from_self or \
packets_from_non_neighbors or packets_received_on_passiveinterface or \
jps_received_on_rpf_interface or joins_received_with_bidir_groups or \
joins_received_with_no_rp or joins_received_with_ssm_groups or joins_received_with_wrong_rp or\
jps_filtered_by_inbound_policy or jps_filtered_by_outbound_policy:
if 'statistics' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name]\
['address_family'][af_name]:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics'] = {}
if 'errors' not in parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics']:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics']['errors'] = {}
if checksum:
parsed_dict['vrf'][vrf]['interfaces'][interface_name] \
['address_family'][af_name]['statistics']['errors']['checksum'] = int(checksum)
if invalid_df_subtypes:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name]\
['statistics']['errors']['invalid_df_subtypes'] = int(invalid_df_subtypes)
if invalid_packet_types:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['invalid_packet_types'] = int(invalid_packet_types)
if authentication_failed:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['authentication_failed'] = int(authentication_failed)
if packet_length_errors:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['packet_length_errors'] = int(packet_length_errors)
if bad_version_packets:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['bad_version_packets'] = int(bad_version_packets)
if packets_from_self:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['packets_from_self'] = int(packets_from_self)
if packets_from_non_neighbors:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['packets_from_non_neighbors'] = int(packets_from_non_neighbors)
if packets_received_on_passiveinterface:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['packets_received_on_passiveinterface']\
= int(packets_received_on_passiveinterface)
if jps_received_on_rpf_interface:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['jps_received_on_rpf_interface'] = int(jps_received_on_rpf_interface)
if joins_received_with_bidir_groups:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['joins_received_with_bidir_groups'] = int(joins_received_with_bidir_groups)
if joins_received_with_no_rp:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['joins_received_with_no_rp'] = int(joins_received_with_no_rp)
if joins_received_with_ssm_groups:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['joins_received_with_ssm_groups'] = int(joins_received_with_ssm_groups)
if joins_received_with_wrong_rp:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['joins_received_with_wrong_rp'] = int(joins_received_with_wrong_rp)
if jps_filtered_by_inbound_policy:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['jps_filtered_by_inbound_policy'] = int(jps_filtered_by_inbound_policy)
if jps_filtered_by_outbound_policy:
parsed_dict['vrf'][vrf]['interfaces'][interface_name]['address_family'][af_name] \
['statistics']['errors']['jps_filtered_by_outbound_policy'] = int(jps_filtered_by_outbound_policy)
continue
return parsed_dict
# =====================================================
# schema for 'show ip/ipv6 pim rp [vrf <WORD>]'
# =====================================================
class ShowPimRpSchema(MetaParser):
    """Schema for:
        show ip pim rp
        show ip pim rp vrf <vrf>
        show ipv6 pim rp
        show ipv6 pim rp vrf <vrf>"""

    # Layout: vrf name -> 'address_family' -> 'ipv4'/'ipv6' -> RP information.
    schema = {
        'vrf':{
            Any():{
                'address_family':{
                    Any():{
                        Optional('rp'):{
                            # Statically configured RPs, keyed by RP address.
                            Optional('static_rp'):{
                                Any(): {
                                    Optional('sm'): {
                                        'policy_name': str,
                                        Optional('route_map'): str,
                                    },
                                    Optional('bidir'): {
                                        'policy_name': str,
                                        Optional('route_map'): str,
                                    },
                                },
                            },
                            # Bootstrap-router (BSR) learned state.
                            Optional('bsr'):{
                                Optional('bsr_candidate'):{
                                    'priority': int,
                                    'hash_mask_length': int,
                                    'address': str,
                                },
                                Optional('bsr'):{
                                    'priority': int,
                                    'hash_mask_length': int,
                                    'address': str,
                                    Optional('up_time'): str,
                                    Optional('expires'): str,
                                },
                                Optional('rp'):{
                                    'up_time': str,
                                    'group_policy': str,
                                    Optional('rp_address'): str,
                                },
                                # Candidate RPs learned via BSR, keyed by RP-source address.
                                Optional('bsr_address'): {
                                    Any():{
                                        'priority': int,
                                        'mode': str,
                                        'address': str,
                                        'policy': str,
                                    },
                                },
                                Optional('bsr_next_bootstrap'): str,
                                Optional('rp_candidate_policy'): str,
                                Optional('rp_policy'): str,
                                Optional('rp_candidate_next_advertisement'): str,
                            },
                            # Auto-RP learned state.
                            Optional('autorp'): {
                                Optional('announce_policy'): str,
                                Optional('discovery_policy'): str,
                                Optional('address'): str,
                                Optional('bsr_next_discovery'): str,
                                Optional('send_rp_announce'): {
                                    Optional('group'): str,
                                    Optional('scope'): int,
                                    Optional('group_list'): str,
                                    Optional('bidir'): bool,
                                    Optional('rp_source'): str,
                                },
                            },
                            # Flat RP list keyed by "<rp_address> <mode> <info_source_type>".
                            Optional('rp_list'): {
                                Any(): {
                                    Optional('address'): str,
                                    Optional('info_source_address'): str,
                                    Optional('info_source_type'): str,
                                    Optional('up_time'): str,
                                    Optional('expiration'): str,
                                    Optional('df_ordinal'): int,
                                    Optional('priority'): int,
                                    Optional('mode'): str,
                                    Optional('group_ranges'): str,
                                },
                            },
                            # Group-to-RP mappings keyed by "<group> <rp_address> <protocol>".
                            Optional('rp_mappings'): {
                                Any(): {
                                    'group': str,
                                    'rp_address': str,
                                    'protocol': str,
                                    'up_time': str,
                                    Optional('expiration'): str,
                                },
                            }
                        },
                        # Anycast-RP membership, keyed by "<anycast_rp> <member>".
                        Optional('sm'): {
                            Optional('asm'): {
                                Optional('anycast_rp'): {
                                    Any(): {
                                        Optional('anycast_address'): str,
                                    }
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for 'show ip/ipv6 pim rp [vrf <vrf>]'
# ==========================================================
class ShowPimRp(ShowPimRpSchema):
    """Parser for:
        show <address_family> pim rp
        show <address_family> pim rp vrf <vrf>"""

    # Index 0 is used when a vrf is supplied, index 1 otherwise.
    cli_command = ['show {af} pim rp vrf {vrf}','show {af} pim rp']

    def cli(self, af='ip', vrf='', output=None):
        """Parse 'show {af} pim rp [vrf <vrf>]' into the schema dict.

        Args:
            af: CLI address-family keyword, 'ip' or 'ipv6'.
            vrf: optional vrf name appended to the command.
            output: pre-collected device output; when None the command is
                executed on the device.
        """
        if vrf:
            cmd = self.cli_command[0].format(af=af,vrf=vrf)
        else:
            cmd = self.cli_command[1].format(af=af)
        if output is None:
            out = self.device.execute(cmd)
        else:
            out = output

        # Parsed keys use 'ipv4', not the CLI keyword 'ip'.
        af_name = 'ipv4' if af == 'ip' else af

        # Init dictionary
        parsed_output = dict()
        # Cross-line parser state; one RP entry spans several output lines.
        vrf_name = bsr = None
        anycast_rp_members_list = []
        flag = False              # True while expecting Anycast-RP member lines
        connection_flag = False   # True while expecting group-range lines

        for line in out.splitlines():
            line = line.rstrip()

            # PIM6 RP Status Information for VRF "VRF1"
            # PIM RP Status Information for VRF "VRF1"
            p1 = re.compile(r'^\s*(PIM6|PIM) +RP +Status +Information +for +VRF +\"(?P<vrf_name>[\w\S]+)\"$')
            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                # Reset all per-vrf state for the new section.
                # NOTE(review): auto_rp_address is initialized ONLY here; if
                # the output lacked this VRF header line, later references to
                # it would raise NameError — confirm all outputs start with it.
                anycast_rp_members_list = []
                flag = False
                connection_flag = False
                bsr = None
                auto_rp_address = None
                continue

            # BSR: Not Operational
            # BSR: 10.1.5.1*, next Bootstrap message in: 00:00:01,
            p2 = re.compile(r'^\s*BSR: +(?P<bsr>[\w\S]+)'
                            '(, +next +Bootstrap +message +in: +(?P<next_bsr_message>[\w\S]+),)?$')
            m = p2.match(line)
            if m:
                # '*' marks the local candidate; strip it from the address.
                bsr = m.groupdict()['bsr'].replace('*','')
                if 'not' in bsr.lower() or 'none' in bsr.lower():
                    # BSR not operational: remember that and skip the line.
                    bsr = None
                    continue
                if 'vrf' not in parsed_output:
                    parsed_output['vrf'] = {}
                if vrf_name not in parsed_output['vrf']:
                    parsed_output['vrf'][vrf_name] = {}
                if 'address_family' not in parsed_output['vrf'][vrf_name]:
                    parsed_output['vrf'][vrf_name]['address_family'] = {}
                if af_name not in parsed_output['vrf'][vrf_name]['address_family']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name] = {}
                if 'rp' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] = {}
                if 'bsr' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']['bsr'] = {}
                if m.groupdict()['next_bsr_message']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                        ['bsr']['bsr_next_bootstrap'] = m.groupdict()['next_bsr_message']
                if 'bsr_candidate' not in parsed_output['vrf'][vrf_name]['address_family']\
                        [af_name]['rp']['bsr']:
                    parsed_output['vrf'][vrf_name]['address_family'] \
                        [af_name]['rp']['bsr']['bsr_candidate'] = {}
                if 'bsr' not in parsed_output['vrf'][vrf_name]['address_family']\
                        [af_name]['rp']['bsr']:
                    parsed_output['vrf'][vrf_name]['address_family'] \
                        [af_name]['rp']['bsr']['bsr'] = {}
                # Same address is recorded under both 'bsr' and 'bsr_candidate'.
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr']['address'] = bsr
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr_candidate']['address'] = bsr
                continue

            # BSR: 10.1.5.5, uptime: 18:04:20, expires: 00:01:50,
            p2_1 = re.compile(r'^\s*BSR(\:)? +(?P<bsr_candidate>[\w\s\.\*\:]+)'
                              ', +uptime: +(?P<bsr_uptime>[\w\.\:]+)'
                              ', +expires: +(?P<bsr_expires>[\w\.\:]+),$')
            m = p2_1.match(line)
            if m:
                bsr_candidate = m.groupdict()['bsr_candidate'].replace('*', '')
                bsr_candidate_uptime = m.groupdict()['bsr_uptime']
                bsr_candidate_expires = m.groupdict()['bsr_expires']
                if 'vrf' not in parsed_output:
                    parsed_output['vrf'] = {}
                if vrf_name not in parsed_output['vrf']:
                    parsed_output['vrf'][vrf_name] = {}
                if 'address_family' not in parsed_output['vrf'][vrf_name]:
                    parsed_output['vrf'][vrf_name]['address_family'] = {}
                if af_name not in parsed_output['vrf'][vrf_name]['address_family']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name] = {}
                if 'rp' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] = {}
                if 'bsr' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']['bsr'] = {}
                if 'bsr_candidate' not in parsed_output['vrf'][vrf_name]['address_family']\
                        [af_name]['rp']['bsr']:
                    parsed_output['vrf'][vrf_name]['address_family'] \
                        [af_name]['rp']['bsr']['bsr_candidate'] = {}
                if 'bsr' not in parsed_output['vrf'][vrf_name]['address_family']\
                        [af_name]['rp']['bsr']:
                    parsed_output['vrf'][vrf_name]['address_family'] \
                        [af_name]['rp']['bsr']['bsr'] = {}
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr']['address'] = bsr_candidate
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr_candidate']['address'] = bsr_candidate
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr']['up_time'] = bsr_candidate_uptime
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr']['expires'] = bsr_candidate_expires
                continue

            # Auto-RP disabled
            p3 = re.compile(r'^\s*Auto-RP +disabled$')
            m = p3.match(line)
            if m:
                continue

            # Auto-RP RPA: 10.229.11.11*, next Discovery message in: 00:00:15
            p3 = re.compile(r'^\s*Auto-RP RPA: +(?P<auto_rp_address>[\w\S]+),'
                            ' +next +[d|D]iscovery +message +in: +(?P<next_discory_message>[\w\S]+)$')
            m = p3.match(line)
            if m:
                auto_rp_address = m.groupdict()['auto_rp_address'].replace('*','')
                if 'vrf' not in parsed_output:
                    parsed_output['vrf'] = {}
                if vrf_name not in parsed_output['vrf']:
                    parsed_output['vrf'][vrf_name] = {}
                if 'address_family' not in parsed_output['vrf'][vrf_name]:
                    parsed_output['vrf'][vrf_name]['address_family'] = {}
                if af_name not in parsed_output['vrf'][vrf_name]['address_family']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name] = {}
                if 'rp' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] = {}
                if 'autorp' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']['autorp'] = {}
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['autorp']['address'] = auto_rp_address
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']\
                    ['autorp']['bsr_next_discovery'] = m.groupdict()['next_discory_message']
                continue

            # BSR RP Candidate policy: None
            p4 = re.compile(r'^\s*BSR +RP +Candidate +policy: +(?P<bsr_rp_candidate_policy>[\w\S]+)$')
            m = p4.match(line)
            if m and bsr:
                # 'None' in the output means no policy configured.
                rp_candidate_policy = None if m.groupdict()['bsr_rp_candidate_policy'].lower() == 'none' \
                    else m.groupdict()['bsr_rp_candidate_policy']
                if rp_candidate_policy:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                        ['bsr']['rp_candidate_policy'] = rp_candidate_policy
                continue

            # BSR RP policy: None
            p5 = re.compile(r'^\s*BSR +RP +policy: +(?P<bsr_rp_policy>[\w\S]+)$')
            m = p5.match(line)
            if m and bsr:
                rp_policy = "" if m.groupdict()['bsr_rp_policy'].lower() == 'none' \
                    else m.groupdict()['bsr_rp_policy']
                if rp_policy:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                        ['bsr']['rp_policy'] = rp_policy
                continue

            # Auto-RP Announce policy: None
            p6 = re.compile(r'^\s*Auto\-RP +Announce +policy: +(?P<auto_rp_announce_policy>[\w\S]+)$')
            m = p6.match(line)
            if m and auto_rp_address:
                announce_policy = "" if m.groupdict()['auto_rp_announce_policy'].lower() \
                    == 'none' else m.groupdict()['auto_rp_announce_policy']
                if 'autorp' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']['autorp'] = {}
                if announce_policy:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']\
                        ['autorp']['announce_policy'] = announce_policy
                continue

            # Auto-RP Discovery policy: None
            p7 = re.compile(r'^\s*Auto\-RP +Discovery +policy: +(?P<auto_rp_discovery_policy>[\w\S]+)$')
            m = p7.match(line)
            if m and auto_rp_address:
                discovery_policy = "" if m.groupdict()['auto_rp_discovery_policy'].lower() == 'none' else \
                    m.groupdict()['auto_rp_discovery_policy']
                if discovery_policy:
                    if 'autorp' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']:
                        parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']['autorp'] = {}
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']\
                        ['autorp']['discovery_policy'] = discovery_policy
                continue

            # Anycast-RP 10.111.111.111 members:
            p11 = re.compile(r'^\s*Anycast\-RP +(?P<anycast_rp>[\w\d\S]+) +members:$')
            m = p11.match(line)
            if m:
                anycast_rp = m.groupdict()['anycast_rp']
                # Member addresses follow on the next line(s).
                flag = True
                if 'vrf' not in parsed_output:
                    parsed_output['vrf'] = {}
                if vrf_name not in parsed_output['vrf']:
                    parsed_output['vrf'][vrf_name] = {}
                if 'address_family' not in parsed_output['vrf'][vrf_name]:
                    parsed_output['vrf'][vrf_name]['address_family'] = {}
                if af_name not in parsed_output['vrf'][vrf_name]['address_family']:
                    parsed_output['vrf'][vrf_name]['address_family'][af_name] = {}
                continue

            # 10.1.2.1* 10.1.5.1
            p11_1 = re.compile(r'^(?P<anycast_rp_members>[\w\.\:\*\s]+)$')
            m = p11_1.match(line)
            if m and flag:
                anycast_rp_members_list = m.groupdict()['anycast_rp_members'].split()
                for member in anycast_rp_members_list:
                    # Entries are keyed "<anycast_rp> <member>"; '*' marks self.
                    anycast_member = anycast_rp +" "+ member.replace('*','')
                    if 'sm' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]:
                        parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm'] = {}
                    if 'asm' not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm']:
                        parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm']['asm'] = {}
                    if 'anycast_rp' not in parsed_output['vrf'][vrf_name]['address_family']\
                            [af_name]['sm']['asm']:
                        parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm']\
                            ['asm']['anycast_rp'] = {}
                    if anycast_member not in parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm']\
                            ['asm']['anycast_rp']:
                        parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm']\
                            ['asm']['anycast_rp'][anycast_member] = {}
                    parsed_output['vrf'][vrf_name]['address_family'][af_name]['sm']\
                        ['asm']['anycast_rp'][anycast_member]['anycast_address'] = anycast_rp
                flag = False
                continue

            # RP: 10.21.33.33, (0), uptime: 03:52:52, expires: never,
            p8 = re.compile(r'^\s*RP: +(?P<rp>[\w\d\S]+), +\(+(?P<df_ordinal>[\d\S]+)+\),'
                            ' +uptime: +(?P<uptime>[\w\.\:]+),'
                            ' +expires: +(?P<expires>[\w\d\S][^,]+)(?P<comma>[\,]+)?$')
            m = p8.match(line)
            if m:
                # rp_dict is consumed by the group-range handler (p10) below.
                rp_dict = parsed_output.setdefault('vrf', {}).setdefault(vrf_name, {})\
                    .setdefault('address_family', {}).setdefault(af_name, {}).setdefault('rp', {})
                rp_address = m.groupdict()['rp'].replace('*','')
                df_ordinal = m.groupdict()['df_ordinal']
                uptime = m.groupdict()['uptime']
                expires = m.groupdict()['expires']
                connection_flag = False
                code = None
                continue

            # RP: 10.115.55.51, (0),
            p8_1 = re.compile(r'^\s*RP: +(?P<rp>[\w\d\S]+), +\(+(?P<df_ordinal>[\d\S]+)+\),$')
            m = p8_1.match(line)
            if m:
                rp_dict = parsed_output.setdefault('vrf', {}).setdefault(vrf_name, {})\
                    .setdefault('address_family', {}).setdefault(af_name, {}).setdefault('rp', {})
                rp_address = m.groupdict()['rp'].replace('*','')
                df_ordinal = m.groupdict()['df_ordinal']
                connection_flag = False
                code = None
                continue

            # uptime: 1d13h priority: 255,
            # uptime: 2d21h, (A) (B), priority: 192
            p8_2 = re.compile(r'^\s*uptime: +(?P<uptime>[\w\.\:]+),?'
                              '( +(?P<modes>( *\(\w\)){0,3}),)? '
                              '+priority: +(?P<priority>\d+),?$')
            m = p8_2.match(line)
            if m:
                uptime = m.groupdict()['uptime']
                priority = int(m.groupdict()['priority'])
                continue

            # RP-source: (local),
            # RP-source: (local), group-map: BIDIR_SPARSE1,
            p8_3 = re.compile(r'^\s*RP\-source:( +(?P<rp_source>\S+))? +\(+(?P<info_source_type>\w+)+\),'
                              ' *(group-map: +(?P<route_map>[\w\-]+),)?$')
            m = p8_3.match(line)
            if m:
                code = None
                rp_sources = [m.groupdict()['rp_source']]
                route_map = m.groupdict()['route_map']
                info_source_type = m.groupdict()['info_source_type']
                # Map the single-letter source code to the schema's name.
                if info_source_type.lower() == 'local':
                    info_source_type_conversions = ['static']
                if info_source_type.lower() == 'b':
                    info_source_type_conversions = ['bootstrap']
                if info_source_type.lower() == 'a':
                    info_source_type_conversions = ['autorp']
                continue

            # RP-source: 10.16.2.2 (A), 10.16.2.2 (B),
            # RP-source: 10.144.6.6 (A), 10.16.2.2 (B), (local),
            p8_4 = re.compile(r'^\s*RP\-source: +(?P<rp_source>\S+) +\(+(?P<info_source_type>\w+)+\),')
            m = p8_4.match(line)
            if m:
                # Multiple sources on one line: collect every (source, type)
                # pair; each source is repeated once per type it carries.
                p = re.compile(r'(?P<rp_source>\S+) +(?P<info_source_type>[\w\(\)\,\s]+),')
                m = p.findall(line)
                info_source_type_conversions = []
                rp_sources = []
                for rp_source, info_source_type in m:
                    conversions = []
                    if 'local' in info_source_type.lower():
                        conversions.append('static')
                    if '(b)' in info_source_type.lower():
                        conversions.append('bootstrap')
                    if '(a)' in info_source_type.lower():
                        conversions.append('autorp')
                    rp_sources.extend([rp_source] * len(conversions))
                    info_source_type_conversions.extend(conversions)
                code = None
                continue

            # group ranges:
            line = line.strip()
            p8_4 = re.compile(r'^group +ranges:$')
            m = p8_4.match(line)
            if m:
                route_map = None
                connection_flag = True
                continue

            # priority: 0, RP-source: (local), group ranges:
            # priority: 92, RP-source: 10.1.5.1 (B), group ranges:
            # priority: 0, RP-source: (local), group-map: PIM6-STATIC-RP, group ranges:
            p9 = re.compile(r'^\s*priority: +(?P<priority>\d+),'
                            ' +RP\-source:( +(?P<rp_source>[\w\S]+))? +\(+(?P<info_source_type>\w+)+\),'
                            '( *group-map: +(?P<route_map>\S+),)?'
                            ' +group +ranges:$')
            m = p9.match(line)
            if m:
                connection_flag = True
                priority = int(m.groupdict()['priority'])
                rp_sources = [m.groupdict()['rp_source']]
                route_map = m.groupdict()['route_map']
                info_source_type = m.groupdict()['info_source_type']
                if info_source_type.lower() == 'local':
                    info_source_type_conversions = ['static']
                    code = 'static'
                if info_source_type.lower() == 'b':
                    info_source_type_conversions = ['bootstrap']
                    code = 'bootstrap'
                if info_source_type.lower() == 'a':
                    info_source_type_conversions = ['autorp']
                    code = 'autorp'
                continue

            # 224.0.0.0/4
            # 233.0.0.0/24 (bidir)
            # ff1e::3002/128 ff1e::3001/128
            # 226.0.0.0/8 (bidir) , expires: 00:02:24 (A)
            p10 = re.compile(r'^\s*(?P<group_ranges>[\w\/\.\:\s]+)'
                             '( +\((?P<bidir>\w+)\))?'
                             '( *, *expires: (?P<expires>[\w\.\:]+)( *\((?P<code>\w+)\))?)?$')
            m = p10.match(line)
            if m and connection_flag:
                # Fan out the group-range line into rp_list / static_rp /
                # autorp / rp_mappings / bsr using the state accumulated from
                # the preceding RP / priority / RP-source lines.
                # NOTE(review): rp_dict, uptime, df_ordinal and priority are
                # assumed to have been set by an earlier matching line; a
                # group-range line with no preceding 'RP:' line would raise
                # NameError — confirm against real device output.
                expire_dict = {}
                group_ranges = m.groupdict()['group_ranges'].strip()
                if m.groupdict()['bidir'] and 'bidir' in m.groupdict()['bidir'].lower():
                    mode = 'BIDIR'
                else:
                    mode = 'SM'
                try:
                    expires = m.groupdict()['expires'] or expires
                except Exception:
                    expires = None
                try:
                    code = m.groupdict()['code'] or code
                    if not code:
                        code = 'static'
                except Exception:
                    code = None
                if code and 'b' == code.lower():
                    code = 'bootstrap'
                elif code and 'a' == code.lower():
                    code = 'autorp'
                # rp_list dict
                rp_list_d = rp_dict.setdefault('rp_list', {})
                for info_source_type_conversion, rp_source in zip(info_source_type_conversions, rp_sources):
                    rp_address_source_type = rp_address + " " + mode + ' ' + info_source_type_conversion
                    rp_list_dict = rp_list_d.setdefault(rp_address_source_type, {})
                    rp_list_dict['address'] = rp_address
                    rp_list_dict['info_source_type'] = info_source_type_conversion
                    if rp_source:
                        rp_list_dict['info_source_address'] = rp_source
                    rp_list_dict['up_time'] = uptime
                    if expires:
                        rp_list_dict['expiration'] = expires
                    if df_ordinal:
                        rp_list_dict['df_ordinal'] = int(df_ordinal)
                    if priority:
                        rp_list_dict['priority'] = priority
                    rp_list_dict['mode'] = mode
                    # append to the list if group-ranges have many entries
                    group_range = rp_list_dict.get('group_ranges', '')
                    group_range = set(group_range.split())
                    group_range.add(group_ranges)
                    rp_list_dict['group_ranges'] = ' '.join(sorted(group_range))
                    # static
                    if info_source_type_conversion == 'static':
                        s_mode = mode.lower()
                        static_rp_dict = rp_dict.setdefault('static_rp', {})\
                            .setdefault(rp_address, {}).setdefault(s_mode, {})
                        static_rp_dict['policy_name'] = group_ranges
                        if route_map:
                            static_rp_dict['route_map'] = route_map
                    # autorp
                    if info_source_type_conversion == 'autorp':
                        autorp_dict = rp_dict.setdefault('autorp', {}).setdefault('send_rp_announce', {})
                        if rp_source:
                            autorp_dict['rp_source'] = rp_source
                        autorp_dict['scope'] = int(df_ordinal)
                        autorp_dict['bidir'] = True if mode == 'BIDIR' else False
                        autorp_dict['group_list'] = group_ranges
                        autorp_dict['group'] = group_ranges.split('/')[0]
                    # rp_mappings
                    key = group_ranges + ' ' + rp_address + ' ' + info_source_type_conversion
                    rp_mappings_dict = rp_dict.setdefault('rp_mappings', {}).setdefault(key, {})
                    rp_mappings_dict['rp_address'] = rp_address
                    rp_mappings_dict['protocol'] = info_source_type_conversion
                    rp_mappings_dict['group'] = group_ranges
                    rp_mappings_dict['up_time'] = uptime
                    if expires and code and code in info_source_type_conversion:
                        rp_mappings_dict['expiration'] = expires
                    # rp bsr bsr_rp_candidate_address
                    if info_source_type_conversion == 'bootstrap' and rp_source:
                        bsr_dict = rp_dict.setdefault('bsr', {}).setdefault('bsr_address', {}).setdefault(rp_source, {})
                        bsr_dict['address'] = rp_source
                        # append to the list if group-ranges have many entries
                        group_range = bsr_dict.get('policy', '')
                        group_range = set(group_range.split())
                        group_range.add(group_ranges)
                        bsr_dict['policy'] = ' '.join(sorted(group_range))
                        bsr_dict['mode'] = mode
                        bsr_dict['priority'] = priority
                        if expires:
                            rp_dict.setdefault('bsr', {}).setdefault('rp_candidate_next_advertisement', expires)
                    # rp bsr rp
                    if info_source_type_conversion == 'bootstrap':
                        bsr_rp_dict = rp_dict.setdefault('bsr', {}).setdefault('rp', {})
                        if rp_source:
                            bsr_rp_dict['rp_address'] = rp_source
                        # append to the list if group-ranges have many entries
                        group_range = bsr_rp_dict.get('group_policy', '')
                        group_range = set(group_range.split())
                        group_range.add(group_ranges)
                        bsr_rp_dict['group_policy'] = ' '.join(sorted(group_range))
                        bsr_rp_dict['up_time'] = uptime
                continue

            # priority: 111, hash-length: 30
            p13 = re.compile(r'^\s*priority: +(?P<priority>\d+),'
                             ' +hash-length: +(?P<hash_length>\d+)$')
            m = p13.match(line)
            if m:
                bsr_prioprity = int(m.groupdict()['priority'])
                bsr_hash_length = int(m.groupdict()['hash_length'])
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']\
                    ['bsr']['bsr']['priority'] = bsr_prioprity
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp']\
                    ['bsr']['bsr']['hash_mask_length'] = bsr_hash_length
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr_candidate']['priority'] = bsr_prioprity
                parsed_output['vrf'][vrf_name]['address_family'][af_name]['rp'] \
                    ['bsr']['bsr_candidate']['hash_mask_length'] = bsr_hash_length
                continue

        return parsed_output
# ==========================================================
# parser for 'show ipv6 pim rp [vrf <vrf>]'
# ==========================================================
class ShowIpv6PimRp(ShowPimRp):
    """Parser for:
        show ipv6 pim rp
        show ipv6 pim rp vrf <vrf>"""

    def cli(self, vrf='', output=None):
        """Delegate to the address-family generic parser with af='ipv6'."""
        kwargs = {'af': 'ipv6', 'vrf': vrf, 'output': output}
        return super().cli(**kwargs)
# ==========================================================
# parser for 'show ip pim rp [vrf <vrf>]'
# ==========================================================
class ShowIpPimRp(ShowPimRp):
    """Parser for:
        show ip pim rp
        show ip pim rp vrf <vrf>"""

    def cli(self, vrf='', output=None):
        """Delegate to the address-family generic parser with af='ip'."""
        kwargs = {'af': 'ip', 'vrf': vrf, 'output': output}
        return super().cli(**kwargs)
# ==============================================
# schema Parser for 'show ipv6 pim df vrf all'
# ==============================================
class ShowIpv6PimDfSchema(MetaParser):
    """Schema for:
        show ipv6 pim df
        show ipv6 pim df vrf <vrf>"""

    # vrf -> 'address_family' -> 'ipv6' -> bidir DF-election entries,
    # keyed by "<rp_address> <interface_name>".
    schema = {
        'vrf': {
            Any(): {
                'address_family': {
                    Any(): {
                        Optional('rp'): {
                            Optional('bidir'): {
                                Optional('interface_df_election'): {
                                    Optional(Any()): {
                                        Optional('address'): str,
                                        Optional('df_ordinal'): int,
                                        Optional('metric_pref'): int,
                                        Optional('metric'): int,
                                        Optional('group_range'): str,
                                        Optional('interface_name'): str,
                                        Optional('df_address'): str,
                                        Optional('df_uptime'): str,
                                        Optional('interface_state'): str,
                                        Optional('winner_metric_pref'): int,
                                        Optional('winner_metric'): int,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ipv6 pim df vrf all
#
# ==========================================================
class ShowIpv6PimDf(ShowIpv6PimDfSchema):
    """Parser for:
        show ipv6 pim df
        show ipv6 pim df vrf <vrf>"""

    cli_command = ['show ipv6 pim df vrf {vrf}', 'show ipv6 pim df']

    def cli(self, vrf='', output=None):
        """Parse 'show ipv6 pim df [vrf <vrf>]' bidir DF-election output.

        Args:
            vrf: optional vrf name appended to the command.
            output: pre-collected device output; when None the command is
                executed on the device.
        """
        if vrf:
            cmd = self.cli_command[0].format(vrf=vrf)
        else:
            cmd = self.cli_command[1]
        if output is None:
            out = self.device.execute(cmd)
        else:
            out = output

        af_name = 'ipv6'

        # Init dictionary
        parsed_dict = dict()
        # Cross-line state: one RP line (p2) is followed by its group-range
        # (p3) and per-interface (p4) lines.  There is no 'continue' after a
        # match, so the storage section at the bottom runs for every line
        # using the most recently matched values.
        vrf = ordinal = df_uptime = df_address = interface = rp_address = ""

        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue

            # Bidir-PIM6 Designated Forwarder Information for VRF "default"
            p1 = re.compile(r'^\s*Bidir-PIM6 +Designated +Forwarder'
                            ' +Information +for +VRF \"(?P<vrf>[\w]+)\"$')
            m = p1.match(line)
            if m:
                vrf = m.groupdict()['vrf']
                ordinal = df_uptime = df_address = interface = rp_address = ""

            # RP Address (ordinal) RP Metric Group Range
            # 2001:db8:1:1::1 (8)
            p2 = re.compile(r'^\s*(?P<rp_address>[\w\:\.]+) +\((?P<ordinal>\d+)\)$')
            m = p2.match(line)
            if m:
                # New RP entry: clear everything carried from the previous one.
                metrics_pref = metrics = group_range = interface = df_address = interface_state \
                    = winner_metrics_pref = winner_metrics = df_uptime = ""
                rp_address = m.groupdict()['rp_address']
                ordinal = m.groupdict()['ordinal']

            # [0/0] ff09::/16
            # The 23-space prefix distinguishes the metric continuation line.
            p3 = re.compile(r'^\s*(?P<space>\s{23})'
                            '\[(?P<metrics_pref>[\d\-]+)/(?P<metrics>[\d\-]+)\] +(?P<group_range>[\w\.\:\/]+)$')
            m = p3.match(line)
            if m:
                metrics_pref = m.groupdict()['metrics_pref']
                metrics = m.groupdict()['metrics']
                group_range = m.groupdict()['group_range']

            # Interface DF Address DF State DF Metric DF Uptime
            # Eth2/1 fe80::5054:ff:fe89:740c Winner [0/0] 00:00:48
            p4 = re.compile(r'^\s*(?P<interface>[\w\-\/]+) +(?P<df_address>[\S]+)'
                            ' +(?P<df_state>\w+)'
                            ' +\[(?P<winner_metrics_pref>[\d\-]+)/(?P<winner_metrics>[\d\-]+)\]'
                            ' +(?P<df_uptime>\S+)$')
            m = p4.match(line)
            if m:
                interface = Common.convert_intf_name(m.groupdict()['interface'])
                df_address = m.groupdict()['df_address']
                interface_state = m.groupdict()['df_state'].lower()
                if interface_state == 'winner':
                    interface_state = 'win'
                winner_metrics_pref = m.groupdict()['winner_metrics_pref']
                winner_metrics = m.groupdict()['winner_metrics']
                df_uptime = m.groupdict()['df_uptime']

            # Storage: executed for every non-empty line once a vrf and an
            # RP address have been seen; writes the latest matched values.
            if vrf and rp_address:
                if 'vrf' not in parsed_dict:
                    parsed_dict['vrf'] = {}
                if vrf not in parsed_dict['vrf']:
                    parsed_dict['vrf'][vrf] = {}
                if 'address_family' not in parsed_dict['vrf'][vrf]:
                    parsed_dict['vrf'][vrf]['address_family'] = {}
                if af_name not in parsed_dict['vrf'][vrf]['address_family']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name] = {}
                if 'rp' not in parsed_dict['vrf'][vrf]['address_family'][af_name]:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp'] = {}
                if 'bidir' not in parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] = {}
                if 'interface_df_election' not in parsed_dict['vrf'][vrf]['address_family'] \
                        [af_name]['rp']['bidir']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp'] \
                        ['bidir']['interface_df_election'] = {}
                if rp_address and interface:
                    # Entries are keyed "<rp_address> <interface>".
                    interface_df_election = rp_address + " " + interface
                    if interface_df_election not in parsed_dict['vrf'][vrf]['address_family'][af_name] \
                            ['rp']['bidir']['interface_df_election']:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election] = {}
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                        ['interface_df_election'][interface_df_election]['interface_name'] = interface
                    if rp_address:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['address'] = rp_address
                    if df_address:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_address'] = df_address
                    if interface_state:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['interface_state'] = interface_state
                    if ordinal:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_ordinal'] = int(ordinal)
                    if metrics_pref:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['metric_pref'] = int(metrics_pref)
                    if metrics:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['metric'] = int(metrics)
                    if group_range:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['group_range'] = group_range
                    if winner_metrics_pref:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election] \
                            ['winner_metric_pref'] = int(winner_metrics_pref)
                    if winner_metrics:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election] \
                            ['winner_metric'] = int(winner_metrics)
                    if df_uptime:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_uptime'] = df_uptime

        return parsed_dict
# ==============================================
# schema Parser for 'show ip pim df [vrf <vrf>]'
# ==============================================
class ShowIpPimDfSchema(MetaParser):
    """Schema for:
        show ip pim df
        show ip pim df vrf <vrf>"""

    # vrf -> 'address_family' -> 'ipv4' -> bidir DF-election entries,
    # keyed by "<rp_address> <interface_name>".
    schema = {
        'vrf': {
            Any(): {
                'address_family': {
                    Any(): {
                        Optional('rp'): {
                            Optional('bidir'): {
                                Optional('interface_df_election'): {
                                    Optional(Any()): {
                                        Optional('address'): str,
                                        Optional('df_ordinal'): int,
                                        Optional('df_bits'): str,
                                        Optional('metric_pref'): int,
                                        Optional('metric'): int,
                                        Optional('group_range'): str,
                                        Optional('interface_name'): str,
                                        Optional('df_address'): str,
                                        Optional('df_uptime'): str,
                                        Optional('interface_state'): str,
                                        Optional('winner_metric_pref'): int,
                                        Optional('winner_metric'): int,
                                        Optional('is_rpf'): bool,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ip pim df [vrf <vrf>]
#
# ==========================================================
class ShowIpPimDf(ShowIpPimDfSchema):
    """Parser for:
        show ip pim df
        show ip pim df vrf <vrf>"""

    cli_command = ['show ip pim df vrf {vrf}','show ip pim df']

    def cli(self, vrf="",output=None):
        """Parse 'show ip pim df [vrf <vrf>]' bidir DF-election output.

        Args:
            vrf: optional vrf name appended to the command.
            output: pre-collected device output; when None the command is
                executed on the device.

        Returns:
            dict conforming to ShowIpPimDfSchema.
        """
        if output is None:
            if vrf:
                cmd = self.cli_command[0].format(vrf=vrf)
            else:
                cmd = self.cli_command[1]
            out = self.device.execute(cmd)
        else:
            out = output

        af_name = 'ipv4'

        # Init dictionary
        parsed_dict = dict()
        # Cross-line state: one RP line (p2) is followed by one line per
        # interface (p3); the storage section below runs for every line once
        # both a vrf and an RP address have been seen.
        vrf = ordinal = df_uptime = df_bits = df_address = interface = rp_address = is_rpf = ""
        # Pre-initialize the values normally set by p2/p3 so truncated or
        # out-of-order output cannot raise NameError in the storage section.
        metrics_pref = metrics = group_range = interface_state = ""
        winner_metrics_pref = winner_metrics = ""

        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue

            # Bidir-PIM Designated Forwarder Information for VRF "default"
            p1 = re.compile(r'^\s*Bidir-PIM +Designated +Forwarder'
                            ' +Information +for +VRF \"(?P<vrf>[\w]+)\"$')
            m = p1.match(line)
            if m:
                vrf = m.groupdict()['vrf']
                # Reset the per-vrf state for the new section.
                ordinal = df_uptime = df_bits = df_address = interface = rp_address = is_rpf = ""

            # RP Address (ordinal) DF-bits RP Metric Group Range
            # 10.16.2.2 (2) 00000002 (1) [0/0] 224.128.0.0/9
            p2 = re.compile(r'^\s*(?P<rp_address>[\d\.]+) +\((?P<ordinal>\d+)\)'
                            ' +(?P<df_bits>[\w\s\(\)]+)'
                            ' +\[(?P<metrics_pref>\d+)/(?P<metrics>\d+)\] +(?P<group_range>[\d\.\/]+)$')
            m = p2.match(line)
            if m:
                rp_address = m.groupdict()['rp_address']
                ordinal = m.groupdict()['ordinal']
                df_bits = m.groupdict()['df_bits'].rstrip()
                metrics_pref = m.groupdict()['metrics_pref']
                metrics = m.groupdict()['metrics']
                group_range = m.groupdict()['group_range']

            # Interface DF Address DF State DF Metric DF Uptime
            # Loopback0 10.4.1.1 Winner [0/0] 00:28:14 (RPF)
            # Ethernet2/2 10.2.0.2 Lose [0/0] 00:28:14
            p3 = re.compile(r'^\s*(?P<interface>\S+) +(?P<df_address>\S+)'
                            ' +(?P<df_state>\w+)'
                            ' +\[(?P<winner_metrics_pref>\d+)/(?P<winner_metrics>\d+)\]'
                            ' +(?P<df_uptime>\S+)'
                            '( +\((?P<is_rpf>\S+)\))?$')
            m = p3.match(line)
            if m:
                interface = Common.convert_intf_name(m.groupdict()['interface'])
                df_address = m.groupdict()['df_address']
                interface_state = m.groupdict()['df_state'].lower()
                if interface_state == 'winner':
                    interface_state = 'win'
                winner_metrics_pref = m.groupdict()['winner_metrics_pref']
                winner_metrics = m.groupdict()['winner_metrics']
                df_uptime = m.groupdict()['df_uptime']
                # The '(RPF)' tag belongs to this row only; recompute the flag
                # on every interface row so a previous row's True value does
                # not leak onto later interfaces (bugfix: it previously stuck
                # once set and tagged every following interface as RPF).
                if m.groupdict()['is_rpf'] and 'rpf' in m.groupdict()['is_rpf'].lower():
                    is_rpf = True
                else:
                    is_rpf = ""

            # Storage: executed for every non-empty line once a vrf and an
            # RP address have been seen; writes the latest matched values.
            if vrf and rp_address:
                if 'vrf' not in parsed_dict:
                    parsed_dict['vrf'] = {}
                if vrf not in parsed_dict['vrf']:
                    parsed_dict['vrf'][vrf] = {}
                if 'address_family' not in parsed_dict['vrf'][vrf]:
                    parsed_dict['vrf'][vrf]['address_family'] = {}
                if af_name not in parsed_dict['vrf'][vrf]['address_family']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name] = {}
                if 'rp' not in parsed_dict['vrf'][vrf]['address_family'][af_name]:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp'] = {}
                if 'bidir' not in parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] = {}
                if 'interface_df_election' not in parsed_dict['vrf'][vrf]['address_family'] \
                        [af_name]['rp']['bidir']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp'] \
                        ['bidir']['interface_df_election'] = {}
                if rp_address and interface:
                    # Entries are keyed "<rp_address> <interface>".
                    interface_df_election = rp_address + " " + interface
                    if interface_df_election not in parsed_dict['vrf'][vrf]['address_family'][af_name] \
                            ['rp']['bidir']['interface_df_election']:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election] = {}
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                        ['interface_df_election'][interface_df_election]['interface_name'] = interface
                    if rp_address:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['address'] = rp_address
                    if df_address:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_address'] = df_address
                    if interface_state:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['interface_state'] = interface_state
                    if is_rpf:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['is_rpf'] = is_rpf
                    if ordinal:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_ordinal'] = int(ordinal)
                    if df_bits:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_bits'] = df_bits
                    if metrics_pref:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['metric_pref'] = int(metrics_pref)
                    if metrics:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['metric'] = int(metrics)
                    if group_range:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['group_range'] = group_range
                    if winner_metrics_pref:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election] \
                            ['winner_metric_pref'] = int(winner_metrics_pref)
                    if winner_metrics:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election] \
                            ['winner_metric'] = int(winner_metrics)
                    if df_uptime:
                        parsed_dict['vrf'][vrf]['address_family'][af_name]['rp']['bidir'] \
                            ['interface_df_election'][interface_df_election]['df_uptime'] = df_uptime

        return parsed_dict
# ============================================
# schema Parser for 'show ipv6 pim route'
# schema Parser for 'show ipv6 pim route vrf <vrf>'
# ============================================
class ShowIpv6PimRouteSchema(MetaParser):
    """Schema for:
        show ipv6 pim route
        show ipv6 pim route vrf <vrf>"""

    # Layout: vrf -> <vrf name> -> address_family -> <af> ->
    # topology_tree_info -> "<group> <source_address> <is_rpt>" -> leaves.
    schema = {
        'vrf': {
            Any(): {  # VRF name
                'address_family': {
                    Any(): {  # address family, e.g. 'ipv6'
                        Optional('topology_tree_info'): {
                            Any(): {  # key "<group> <source_address> <is_rpt>"
                                Optional('group'): str,
                                Optional('source_address'): str,
                                Optional('is_rpt'): bool,  # True when source_address is '*'
                                Optional('rp_bit'): bool,
                                Optional('expiration'): str,
                                Optional('incoming_interface'): str,
                                Optional('mode'): str,
                                Optional('rp_address'): str,
                                Optional('rpf_neighbor'): str,
                                Optional('jp_holdtime_roundup'): int,
                                Optional('oif'): str,
                                Optional('oif_count'): int,
                                Optional('oif_timeout_count'): int,
                                Optional('oif_timeout'): str,
                                Optional('immediate'): str,
                                Optional('immediate_count'): int,
                                Optional('immediate_timeout_count'): int,
                                Optional('immediate_timeout'): str,
                                Optional('timeout_interval'): int,
                                Optional('sgr_prune_count'): int,
                                Optional('sgr_prune'): str,
                                Optional('route_fabric_owned'): bool,
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ipv6 pim route
# Parser for show ipv6 pim route vrf <vrf>
# ==========================================================
class ShowIpv6PimRoute(ShowIpv6PimRouteSchema):
    """Parser for:
        show ipv6 pim route
        show ipv6 pim route vrf <vrf>"""

    cli_command = ['show ipv6 pim route vrf {vrf}', 'show ipv6 pim route']

    def cli(self, vrf="", output=None):
        """Parse 'show ipv6 pim route [vrf <vrf>]'.

        Args:
            vrf (str): optional VRF name used to build the command.
            output (str): pre-collected device output; when None the
                command is executed on the device.

        Returns:
            dict conforming to ShowIpv6PimRouteSchema.
        """
        if output is None:
            if vrf:
                cmd = self.cli_command[0].format(vrf=vrf)
            else:
                cmd = self.cli_command[1]
            output = self.device.execute(cmd)

        af_name = 'ipv6'

        # Per-route accumulators; reset whenever a new "(S, G)" header line
        # (p2) is seen, and merged into the current topology entry on every
        # subsequent line.
        rp_bit = False
        is_rpt = False
        route_fabric_owned = ""
        mode = vrf_name = group = source_address = ""
        incoming_interface = rp_address = ""
        imm_count = imm_timeout_count = immf_bf_str = imm_timeout_bf_str = ""
        oif_count = oif_timeout_count = oif_bf_str = oif_timeout_bf_str = ""
        timeout_interval = sgr_count = sgr_prune_str = ""
        jp_round_up = rpf_nbr = expires = ""

        parsed_output = {}

        # Patterns are compiled once here instead of once per output line.
        # PIM6 Routing Table for VRF "VRF1" - 1 entries
        p1 = re.compile(r'^\s*PIM6 +Routing +Table +for +VRF +\"(?P<vrf_name>[\S]+)\" +\-'
                        r' +(?P<counter>\d+) +entries$')
        # (*, ff08::/16), RP 2001:db8:12:12::12, bidir, expires 00:02:31 Route Fabric owned : FALSE, RP-bit
        # (*, ff30::/12), expires 00:02:31 Route Fabric owned : FALSE
        # (*, ff30::/12), expires 0.000000 Route Fabric owned : FALSE (00:00:02)
        p2 = re.compile(r'^\s*\((?P<source_address>[\S]+)'
                        r', +(?P<group>[\S]+)\),'
                        r'( +RP +(?P<rp>[\S\-]+),)?( +(?P<mode>\w+),)?'
                        r' +expires +(?P<expires>[\w\.\:\(\)\s]+)'
                        r' +Route +Fabric +owned +: ((?P<fabric_owned>[\w]+)( *(?P<dummy>\S+))?)'
                        r'(, +(?P<rp_bit>[\S]+))?$')
        # "0.000000 (00:00:02)" -> keep the parenthesised value
        p2_1 = re.compile(r'^\s*(?P<expires_1>[\d\S]+) +\((?P<expires_2>[\S]+)\)$')
        # Incoming interface: Null, RPF nbr 0.0.0.0
        p3 = re.compile(r'^\s*Incoming +interface: (?P<incoming_interface>[\S]+),'
                        r' +RPF +nbr +(?P<rpf_nbr>[\S]+)$')
        # Oif-list: (0) 00000000, timeout-list: (0) 00000000
        p4 = re.compile(r'^\s*Oif-list: +\((?P<oif_count>[\d]+)\)'
                        r' +(?P<oif_bf_str>[\S]+), +timeout\-list: +\((?P<timeout_count>[\d]+)\)'
                        r' +(?P<timeout_bf_str>[\S]+)$')
        # Immediate-list: (0) 00000000, timeout-list: (0) 00000000
        p5 = re.compile(r'^\s*Immediate-list: +\((?P<imm_count>[\d]+)\)'
                        r' +(?P<imm_bf_str>[\S]+), +timeout\-list: +\((?P<timeout_count>[\d]+)\)'
                        r' +(?P<timeout_bf_str>[\S]+)$')
        # Sgr-prune-list: (0) 00000000
        # (the original re-used the name p5 for this second pattern)
        p6 = re.compile(r'^\s*Sgr-prune-list: +\((?P<sgr_count>[\d]+)\)'
                        r' +(?P<sgr_prune_str>[\S]+)$')
        # Timeout-interval: 3, JP-holdtime round-up: 3
        p7 = re.compile(r'^\s*Timeout-interval: +(?P<timeout_interval>\d+)'
                        r', +JP-holdtime +round-up: +(?P<jp_round_up>\d+)$')

        for line in output.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                group = ""

            m = p2.match(line)
            if m:
                gd = m.groupdict()
                # New route entry: clear every per-route accumulator.
                rp_bit = False
                mode = incoming_interface = rp_address = ""
                imm_count = imm_timeout_count = immf_bf_str = imm_timeout_bf_str = ""
                oif_count = oif_timeout_count = oif_bf_str = oif_timeout_bf_str = ""
                timeout_interval = sgr_count = sgr_prune_str = ""
                jp_round_up = rpf_nbr = expires = ""

                group = gd['group']
                source_address = gd['source_address']
                is_rpt = source_address == '*'
                route_fabric_owned = gd['fabric_owned'].lower()

                expire_value = gd['expires']
                if '(' in expire_value:
                    match_1 = p2_1.match(expire_value)
                    if match_1:
                        expires = match_1.groupdict()['expires_2']
                else:
                    expires = expire_value

                if gd['mode']:
                    mode = gd['mode']
                if gd['rp_bit']:
                    rp_bit = True
                if gd['rp']:
                    rp_address = gd['rp'].replace('*', '')

            m = p3.match(line)
            if m:
                incoming_interface = m.groupdict()['incoming_interface']
                rpf_nbr = m.groupdict()['rpf_nbr']

            m = p4.match(line)
            if m:
                oif_count = m.groupdict()['oif_count']
                oif_bf_str = m.groupdict()['oif_bf_str']
                oif_timeout_count = m.groupdict()['timeout_count']
                oif_timeout_bf_str = m.groupdict()['timeout_bf_str']

            m = p5.match(line)
            if m:
                imm_count = m.groupdict()['imm_count']
                immf_bf_str = m.groupdict()['imm_bf_str']
                imm_timeout_count = m.groupdict()['timeout_count']
                imm_timeout_bf_str = m.groupdict()['timeout_bf_str']

            m = p6.match(line)
            if m:
                sgr_count = m.groupdict()['sgr_count']
                sgr_prune_str = m.groupdict()['sgr_prune_str']

            m = p7.match(line)
            if m:
                timeout_interval = m.groupdict()['timeout_interval']
                jp_round_up = m.groupdict()['jp_round_up']

            # Re-write the current topology entry on every line so that
            # fields picked up by later lines (p3-p7) are merged in.
            if vrf_name and group:
                topology = "{} {} {}".format(group, source_address, is_rpt)
                entry = (parsed_output.setdefault('vrf', {})
                         .setdefault(vrf_name, {})
                         .setdefault('address_family', {})
                         .setdefault(af_name, {})
                         .setdefault('topology_tree_info', {})
                         .setdefault(topology, {}))
                entry['group'] = group
                entry['source_address'] = source_address
                entry['expiration'] = expires
                entry['is_rpt'] = is_rpt
                if mode:
                    entry['mode'] = mode
                if rp_bit:
                    entry['rp_bit'] = rp_bit
                if incoming_interface:
                    entry['incoming_interface'] = incoming_interface
                if rp_address:
                    entry['rp_address'] = rp_address
                if rpf_nbr:
                    entry['rpf_neighbor'] = rpf_nbr
                if oif_count:
                    entry['oif_count'] = int(oif_count)
                if oif_bf_str:
                    entry['oif'] = oif_bf_str
                if oif_timeout_count:
                    entry['oif_timeout_count'] = int(oif_timeout_count)
                if oif_timeout_bf_str:
                    entry['oif_timeout'] = oif_timeout_bf_str
                if imm_count:
                    entry['immediate_count'] = int(imm_count)
                if immf_bf_str:
                    entry['immediate'] = immf_bf_str
                if imm_timeout_count:
                    entry['immediate_timeout_count'] = int(imm_timeout_count)
                if imm_timeout_bf_str:
                    entry['immediate_timeout'] = imm_timeout_bf_str
                if sgr_count:
                    entry['sgr_prune_count'] = int(sgr_count)
                if sgr_prune_str:
                    entry['sgr_prune'] = sgr_prune_str
                if timeout_interval:
                    entry['timeout_interval'] = int(timeout_interval)
                if jp_round_up:
                    entry['jp_holdtime_roundup'] = int(jp_round_up)
                if route_fabric_owned:
                    # Device prints TRUE/FALSE; anything but 'false' is True.
                    entry['route_fabric_owned'] = route_fabric_owned != 'false'

        return parsed_output
# ===========================================
# schema Parser for 'show ipv6 pim neighbor'
# ===========================================
class ShowIpv6PimNeighborSchema(MetaParser):
    """Schema for show ipv6 pim neighbor"""

    # Layout: vrf -> <vrf> -> interfaces -> <intf> -> address_family ->
    # <af> -> neighbors -> <neighbor address> -> leaf fields.
    schema = {
        'vrf': {
            Any(): {  # VRF name
                'interfaces': {
                    Any(): {  # interface name
                        'address_family': {
                            Any(): {  # address family, e.g. 'ipv6'
                                'neighbors': {
                                    Any(): {  # neighbor address
                                        Optional('bfd_status'): bool,
                                        Optional('expiration'): str,
                                        Optional('dr_priority'): int,
                                        Optional('up_time'): str,
                                        Optional('interface'): str,
                                        Optional('bidir_capable'): bool,
                                    },
                                    # NOTE: stored beside the neighbor-address
                                    # keys, not inside a neighbor entry (this
                                    # mirrors how the parser fills it in).
                                    Optional('secondary_address'): list,
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ipv6 pim neighbor
# parser for show ipv6 pim neighbor vrf <word>
# ==========================================================
class ShowIpv6PimNeighbor(ShowIpv6PimNeighborSchema):
    """Parser for:
        show ipv6 pim neighbor
        show ipv6 pim neighbor vrf <vrf>"""

    cli_command = ['show ipv6 pim neighbor', 'show ipv6 pim neighbor vrf {vrf}']

    def cli(self, vrf="", output=None):
        """Parse 'show ipv6 pim neighbor [vrf <vrf>]'.

        Args:
            vrf (str): optional VRF name used to build the command.
            output (str): pre-collected device output; when None the
                command is executed on the device.

        Returns:
            dict conforming to ShowIpv6PimNeighborSchema.
        """
        if output is None:
            if vrf:
                cmd = self.cli_command[1].format(vrf=vrf)
            else:
                cmd = self.cli_command[0]
            output = self.device.execute(cmd)

        af_name = 'ipv6'
        parsed_output = {}
        second_address_flag = False
        secondary_address = []
        # Initialized so that table rows seen before any VRF/neighbor header
        # are ignored instead of raising NameError.
        vrf_name = intf_name = ""

        # Patterns are compiled once here instead of once per output line.
        # PIM6 Neighbor Status for VRF "VRF1"
        p1 = re.compile(r'^PIM6 +Neighbor +Status +for +VRF +\"(?P<vrf_name>[\S]+)\"$')
        # Neighbor Address         Interface  Uptime    Expires   DR  Bidir-  BFD
        #                                                         Pri Capable State
        # fe80::5054:ff:fe5b:aa80  Eth2/2     07:31:36  00:01:28  1   yes     n/a
        p2 = re.compile(r'^(?P<neighbor>[\S]+)'
                        r' +(?P<intf_name>[\S]+)'
                        r' +(?P<up_time>[\S]+)'
                        r' +(?P<expires>[\S]+)'
                        r' +(?P<dr_priority>\d+)'
                        r' +(?P<bidir_capable>\w+)'
                        r' +(?P<bfd_state>[\S]+)$')
        # Secondary addresses:
        p3 = re.compile(r'^Secondary +addresses:$')
        # 2001:db8:11:33::33
        p4 = re.compile(r'^(?P<secondary_address>([a-zA-Z0-9\.\:]+))$')

        def _neighbors():
            # Neighbors container for the current vrf/interface pair.
            return (parsed_output.setdefault('vrf', {})
                    .setdefault(vrf_name, {})
                    .setdefault('interfaces', {})
                    .setdefault(intf_name, {})
                    .setdefault('address_family', {})
                    .setdefault(af_name, {})
                    .setdefault('neighbors', {}))

        for line in output.splitlines():
            line = line.strip()

            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                secondary_address = []
                continue

            m = p2.match(line)
            if m:
                second_address_flag = False
                gd = m.groupdict()
                neighbor = gd['neighbor']
                intf_name = Common.convert_intf_name(gd['intf_name'])
                if intf_name and vrf_name:
                    nbr = _neighbors().setdefault(neighbor, {})
                    # BFD column shows 'n/a' when BFD is not in use.
                    nbr['bfd_status'] = 'n/a' not in gd['bfd_state']
                    nbr['expiration'] = gd['expires']
                    nbr['dr_priority'] = int(gd['dr_priority'])
                    nbr['up_time'] = gd['up_time']
                    nbr['interface'] = intf_name
                    nbr['bidir_capable'] = gd['bidir_capable'].lower() == 'yes'
                continue

            m = p3.match(line)
            if m:
                second_address_flag = True
                continue

            m = p4.match(line)
            if second_address_flag and m:
                secondary_address.append(m.groupdict()['secondary_address'])
                if intf_name and vrf_name:
                    # Stored directly under 'neighbors', matching the
                    # schema's Optional('secondary_address') placement.
                    _neighbors()['secondary_address'] = secondary_address
                continue

        return parsed_output
# ============================================
# schema Parser for 'show ip pim route'
# ============================================
class ShowIpPimRouteSchema(MetaParser):
    """Schema for:
        show ip pim route
        show ip pim route vrf <vrf>"""

    # Same layout as ShowIpv6PimRouteSchema, minus 'route_fabric_owned':
    # vrf -> <vrf> -> address_family -> <af> -> topology_tree_info ->
    # "<group> <source_address> <is_rpt>" -> leaf fields.
    schema = {
        'vrf': {
            Any(): {  # VRF name
                'address_family': {
                    Any(): {  # address family, e.g. 'ipv4'
                        Optional('topology_tree_info'): {  # Ops From cmds [show ip mroute vrf all|show ipv6 mroute vrf all]
                            Any(): {  # key "<group> <source_address> <is_rpt>", e.g. '232.0.0.0/8 * True'
                                Optional('group'): str,  # Ops '232.0.0.0/8'
                                Optional('source_address'): str,  # Ops '*'|'192.168.1.1'
                                Optional('is_rpt'): bool,  # Ops True|False (True if source-address is '*')
                                Optional('rp_bit'): bool,
                                Optional('expiration'): str,  # Ops '00:01:58' from show ip|ipv6 pim route vrf all
                                Optional('incoming_interface'): str,  # Ops 'Ethernet1/34'
                                Optional('mode'): str,  # Ops 'none'|'ssm'|'asm'|'bidir'|'dm'|'other'
                                Optional('rp_address'): str,  # Ops '10.16.2.2'
                                Optional('rpf_neighbor'): str,  # Ops '10.144.0.1'
                                Optional('jp_holdtime_roundup'): int,
                                Optional('oif'): str,
                                Optional('oif_count'): int,
                                Optional('oif_timeout_count'): int,
                                Optional('oif_timeout'): str,
                                Optional('immediate'): str,
                                Optional('immediate_count'): int,
                                Optional('immediate_timeout_count'): int,
                                Optional('immediate_timeout'): str,
                                Optional('timeout_interval'): int,
                                Optional('sgr_prune_count'): int,
                                Optional('sgr_prune'): str,
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ip pim route
# parser for show ip pim route vrf <word>
# ==========================================================
class ShowIpPimRoute(ShowIpPimRouteSchema):
    """Parser for:
        show ip pim route
        show ip pim route vrf <vrf>"""

    cli_command = ['show ip pim route', 'show ip pim route vrf {vrf}']

    def cli(self, vrf="", output=None):
        """Parse 'show ip pim route [vrf <vrf>]'.

        Args:
            vrf (str): optional VRF name used to build the command.
            output (str): pre-collected device output; when None the
                command is executed on the device.

        Returns:
            dict conforming to ShowIpPimRouteSchema.
        """
        if output is None:
            if not vrf:
                cmd = self.cli_command[0]
            else:
                cmd = self.cli_command[1].format(vrf=vrf)
            output = self.device.execute(cmd)

        af_name = 'ipv4'

        # Per-route accumulators; reset whenever a new "(S, G)" header line
        # (p2) is seen, and merged into the current topology entry on every
        # subsequent line.
        rp_bit = False
        is_rpt = False
        mode = vrf_name = group = source_address = ""
        incoming_interface = rp_address = ""
        imm_count = imm_timeout_count = immf_bf_str = imm_timeout_bf_str = ""
        oif_count = oif_timeout_count = oif_bf_str = oif_timeout_bf_str = ""
        timeout_interval = sgr_count = sgr_prune_str = ""
        jp_round_up = rpf_nbr = expires = ""

        parsed_output = {}

        # Patterns are compiled once here instead of once per output line.
        # PIM Routing Table for VRF "VRF1" - 1 entries
        p1 = re.compile(r'^\s*PIM +Routing +Table +for +VRF +\"(?P<vrf_name>[\S]+)\" +\-'
                        r' +(?P<counter>\d+) +entries$')
        # (*, 232.0.0.0/8), expires 0.000000 (00:00:01)
        # (*, 233.0.0.0/24), RP 10.66.12.12, bidir, expires 00:01:58, RP-bit
        p2 = re.compile(r'^\s*\((?P<source_address>[\S]+)'
                        r', +(?P<group>[\S]+)\),'
                        r'( +RP +(?P<rp>[\S\-]+),)?( +(?P<mode>\w+),)?'
                        r' +expires +(?P<expires>[\w\.\:\(\)\s]+)'
                        r'(, +(?P<rp_bit>[\S]+))?$')
        # "0.000000 (00:00:01)" -> keep the parenthesised value
        p2_1 = re.compile(r'^\s*(?P<expires_1>[\d\S]+) +\((?P<expires_2>[\S]+)\)$')
        # Incoming interface: Null, RPF nbr 0.0.0.0
        p3 = re.compile(r'^\s*Incoming +interface: (?P<incoming_interface>[\S]+),'
                        r' +RPF +nbr +(?P<rpf_nbr>[\S]+)$')
        # Oif-list: (0) 00000000, timeout-list: (0) 00000000
        p4 = re.compile(r'^\s*Oif-list: +\((?P<oif_count>[\d]+)\)'
                        r' +(?P<oif_bf_str>[\S]+), +timeout\-list: +\((?P<timeout_count>[\d]+)\)'
                        r' +(?P<timeout_bf_str>[\S]+)$')
        # Immediate-list: (0) 00000000, timeout-list: (0) 00000000
        p5 = re.compile(r'^\s*Immediate-list: +\((?P<imm_count>[\d]+)\)'
                        r' +(?P<imm_bf_str>[\S]+), +timeout\-list: +\((?P<timeout_count>[\d]+)\)'
                        r' +(?P<timeout_bf_str>[\S]+)$')
        # Sgr-prune-list: (0) 00000000
        # (the original re-used the name p5 for this second pattern)
        p6 = re.compile(r'^\s*Sgr-prune-list: +\((?P<sgr_count>[\d]+)\)'
                        r' +(?P<sgr_prune_str>[\S]+)$')
        # Timeout-interval: 3, JP-holdtime round-up: 3
        p7 = re.compile(r'^\s*Timeout-interval: +(?P<timeout_interval>\d+)'
                        r', +JP-holdtime +round-up: +(?P<jp_round_up>\d+)$')

        for line in output.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                group = ""

            m = p2.match(line)
            if m:
                gd = m.groupdict()
                # New route entry: clear every per-route accumulator.
                rp_bit = False
                mode = incoming_interface = rp_address = ""
                imm_count = imm_timeout_count = immf_bf_str = imm_timeout_bf_str = ""
                oif_count = oif_timeout_count = oif_bf_str = oif_timeout_bf_str = ""
                timeout_interval = sgr_count = sgr_prune_str = ""
                jp_round_up = rpf_nbr = expires = ""

                group = gd['group']
                source_address = gd['source_address']
                is_rpt = source_address == '*'

                expire_value = gd['expires']
                if '(' in expire_value:
                    match_1 = p2_1.match(expire_value)
                    if match_1:
                        expires = match_1.groupdict()['expires_2']
                else:
                    expires = expire_value

                if gd['mode']:
                    mode = gd['mode']
                if gd['rp_bit']:
                    rp_bit = True
                if gd['rp']:
                    rp_address = gd['rp'].replace('*', '')

            m = p3.match(line)
            if m:
                incoming_interface = m.groupdict()['incoming_interface']
                rpf_nbr = m.groupdict()['rpf_nbr']

            m = p4.match(line)
            if m:
                oif_count = m.groupdict()['oif_count']
                oif_bf_str = m.groupdict()['oif_bf_str']
                oif_timeout_count = m.groupdict()['timeout_count']
                oif_timeout_bf_str = m.groupdict()['timeout_bf_str']

            m = p5.match(line)
            if m:
                imm_count = m.groupdict()['imm_count']
                immf_bf_str = m.groupdict()['imm_bf_str']
                imm_timeout_count = m.groupdict()['timeout_count']
                imm_timeout_bf_str = m.groupdict()['timeout_bf_str']

            m = p6.match(line)
            if m:
                sgr_count = m.groupdict()['sgr_count']
                sgr_prune_str = m.groupdict()['sgr_prune_str']

            m = p7.match(line)
            if m:
                timeout_interval = m.groupdict()['timeout_interval']
                jp_round_up = m.groupdict()['jp_round_up']

            # Re-write the current topology entry on every line so that
            # fields picked up by later lines (p3-p7) are merged in.
            if vrf_name and group:
                topology = "{} {} {}".format(group, source_address, is_rpt)
                entry = (parsed_output.setdefault('vrf', {})
                         .setdefault(vrf_name, {})
                         .setdefault('address_family', {})
                         .setdefault(af_name, {})
                         .setdefault('topology_tree_info', {})
                         .setdefault(topology, {}))
                entry['group'] = group
                entry['source_address'] = source_address
                entry['expiration'] = expires
                entry['is_rpt'] = is_rpt
                if mode:
                    entry['mode'] = mode
                if rp_bit:
                    entry['rp_bit'] = rp_bit
                if incoming_interface:
                    entry['incoming_interface'] = incoming_interface
                if rp_address:
                    entry['rp_address'] = rp_address
                if rpf_nbr:
                    entry['rpf_neighbor'] = rpf_nbr
                if oif_count:
                    entry['oif_count'] = int(oif_count)
                if oif_bf_str:
                    entry['oif'] = oif_bf_str
                if oif_timeout_count:
                    entry['oif_timeout_count'] = int(oif_timeout_count)
                if oif_timeout_bf_str:
                    entry['oif_timeout'] = oif_timeout_bf_str
                if imm_count:
                    entry['immediate_count'] = int(imm_count)
                if immf_bf_str:
                    entry['immediate'] = immf_bf_str
                if imm_timeout_count:
                    entry['immediate_timeout_count'] = int(imm_timeout_count)
                if imm_timeout_bf_str:
                    entry['immediate_timeout'] = imm_timeout_bf_str
                if sgr_count:
                    entry['sgr_prune_count'] = int(sgr_count)
                if sgr_prune_str:
                    entry['sgr_prune'] = sgr_prune_str
                if timeout_interval:
                    entry['timeout_interval'] = int(timeout_interval)
                if jp_round_up:
                    entry['jp_holdtime_roundup'] = int(jp_round_up)

        return parsed_output
# ==================================================
# schema Parser for 'show ipv6 pim group-range'
# ==================================================
class ShowIpv6PimGroupRangeSchema(MetaParser):
    """Schema for show ipv6 pim group-range"""

    # Layout: vrf -> <vrf> -> address_family -> <af> -> 'sm' ->
    # <mode> -> <group range> -> leaf fields.
    schema = {
        'vrf': {
            Any(): {  # VRF name
                'address_family': {
                    Any(): {  # address family, e.g. 'ipv6'
                        'sm': {
                            Any(): {  # mode (lower-cased), e.g. 'ssm'
                                Any(): {  # group range, e.g. 'ff3x::/32'
                                    Optional('mode'): str,
                                    Optional('rp_address'): str,
                                    Optional('shared_tree_only'): str,
                                    Optional('range'): str,
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ipv6 pim group-range
# parser for show ipv6 pim group-range vrf <vrf>
# ==========================================================
class ShowIpv6PimGroupRange(ShowIpv6PimGroupRangeSchema):
    """Parser for:
        show ipv6 pim group-range
        show ipv6 pim group-range vrf <vrf>"""

    cli_command = ['show ipv6 pim group-range', 'show ipv6 pim group-range vrf {vrf}']

    def cli(self, vrf="", output=None):
        """Parse 'show ipv6 pim group-range [vrf <vrf>]'.

        Args:
            vrf (str): optional VRF name used to build the command.
            output (str): pre-collected device output; when None the
                command is executed on the device.

        Returns:
            dict conforming to ShowIpv6PimGroupRangeSchema.
        """
        if output is None:
            if not vrf:
                cmd = self.cli_command[0]
            else:
                cmd = self.cli_command[1].format(vrf=vrf)
            output = self.device.execute(cmd)

        af_name = 'ipv6'
        parsed_output = {}
        # Seed with the requested vrf (may be overridden by the per-VRF
        # header line); starting from "" avoids a NameError when a table
        # row precedes the header and no vrf argument was given.
        vrf_name = vrf
        mode = ""

        # Patterns are compiled once here instead of once per output line.
        # PIM6 Group-Range Configuration for VRF "VRF1"
        p1 = re.compile(r'^\s*PIM6 +Group\-Range +Configuration +for +VRF +\"(?P<vrf_name>[\S]+)\"$')
        # Group-range        Mode  RP-address  Shared-tree-only range
        # ff3x::/32          SSM   -           -
        # NOTE(review): [^\s*Group-range] is a negated character *class*
        # (it excludes any line starting with one of those characters),
        # not a literal-word exclusion; it happens to skip the header
        # because that line starts with 'G'. Kept as-is to preserve
        # behavior.
        p2 = re.compile(r'^\s*(?P<group_range>[^\s*Group-range][\S]+)'
                        r' +(?P<mode>[\S]+)'
                        r' +(?P<rp_address>[\S]+)'
                        r'( +(?P<shared_tree_only>[\S]+))?'
                        r'( +(?P<range>[\S]+))?$')

        for line in output.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                mode = ""
                continue

            m = p2.match(line)
            if m:
                gd = m.groupdict()
                group_range = gd['group_range']
                mode = gd['mode'].lower()
                rp_address = gd['rp_address']
                if group_range and vrf_name and mode:
                    entry = (parsed_output.setdefault('vrf', {})
                             .setdefault(vrf_name, {})
                             .setdefault('address_family', {})
                             .setdefault(af_name, {})
                             .setdefault('sm', {})
                             .setdefault(mode, {})
                             .setdefault(group_range, {}))
                    entry['mode'] = mode
                    # '-' in a column means "not applicable" on the device.
                    if '-' not in rp_address:
                        entry['rp_address'] = rp_address
                    if gd['shared_tree_only'] and '-' not in gd['shared_tree_only']:
                        entry['shared_tree_only'] = gd['shared_tree_only']
                    if gd['range'] and '-' not in gd['range']:
                        entry['range'] = gd['range'].lower()
                continue

        return parsed_output
# ==================================================
# schema Parser for 'show ip pim neighbor'
# ==================================================
class ShowIpPimNeighborSchema(MetaParser):
    """Schema for show ip pim neighbor"""

    # Same layout as the IPv6 variant, but every neighbor field is
    # mandatory here (no Optional markers, no secondary_address).
    schema = {
        'vrf': {
            Any(): {  # VRF name
                'interfaces': {
                    Any(): {  # interface name
                        'address_family': {
                            Any(): {  # address family, e.g. 'ipv4'
                                'neighbors': {
                                    Any(): {  # neighbor address
                                        'bfd_status': bool,
                                        'expiration': str,
                                        'dr_priority': int,
                                        'up_time': str,
                                        'interface': str,
                                        'bidir_capable': bool,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# Parser for 'show ip pim neighbor'
#            'show ip pim neighbor vrf <vrf>'
# ==========================================================
class ShowIpPimNeighbor(ShowIpPimNeighborSchema):
    """Parser for:
        show ip pim neighbor
        show ip pim neighbor vrf <vrf>"""

    cli_command = ['show ip pim neighbor', 'show ip pim neighbor vrf {vrf}']

    def cli(self, vrf="", output=None):
        """Parse 'show ip pim neighbor [vrf <vrf>]'.

        Args:
            vrf (str): optional VRF name used to build the command.
            output (str): pre-collected device output; when None the
                command is executed on the device.

        Returns:
            dict conforming to ShowIpPimNeighborSchema.
        """
        if output is None:
            if not vrf:
                cmd = self.cli_command[0]
            else:
                cmd = self.cli_command[1].format(vrf=vrf)
            output = self.device.execute(cmd)

        af_name = 'ipv4'
        parsed_output = {}
        # Initialized so that table rows seen before any VRF header are
        # ignored instead of raising NameError.
        vrf_name = ""

        # Patterns are compiled once here instead of once per output line.
        # PIM Neighbor Status for VRF "VRF1"
        p1 = re.compile(r'^\s*PIM +Neighbor +Status +for +VRF +\"(?P<vrf_name>[\S]+)\"$')
        # Neighbor     Interface       Uptime    Expires   DR       Bidir-  BFD
        #                                                  Priority Capable State
        # 10.11.33.33  Ethernet2/2     07:31:30  00:01:25  1        yes     n/a
        # 10.2.3.3     Ethernet1/3.11  00:31:08  00:01:39  1        yes     n/a    no
        # (the trailing optional column is the ECMP Redirect 'Capable' flag;
        #  it is matched but not stored in the schema)
        p2 = re.compile(r'^\s*(?P<neighbor>[\S]+)'
                        r' +(?P<intf_name>[\S]+)'
                        r' +(?P<up_time>[\S]+)'
                        r' +(?P<expires>[\S]+)'
                        r' +(?P<dr_priority>\d+)'
                        r' +(?P<bidir_capable>\w+)'
                        r' +(?P<bfd_state>[\S]+)'
                        r'( +(?P<redict_capable>[\S]+))?$')

        for line in output.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                continue

            m = p2.match(line)
            if m:
                gd = m.groupdict()
                neighbor = gd['neighbor']
                intf_name = gd['intf_name']
                if intf_name and vrf_name:
                    nbr = (parsed_output.setdefault('vrf', {})
                           .setdefault(vrf_name, {})
                           .setdefault('interfaces', {})
                           .setdefault(intf_name, {})
                           .setdefault('address_family', {})
                           .setdefault(af_name, {})
                           .setdefault('neighbors', {})
                           .setdefault(neighbor, {}))
                    # BFD column shows 'n/a' when BFD is not in use.
                    nbr['bfd_status'] = 'n/a' not in gd['bfd_state']
                    nbr['expiration'] = gd['expires']
                    nbr['dr_priority'] = int(gd['dr_priority'])
                    nbr['up_time'] = gd['up_time']
                    nbr['interface'] = intf_name
                    nbr['bidir_capable'] = gd['bidir_capable'].lower() == 'yes'
                continue

        return parsed_output
# =============================================
# Schema for 'show ip pim vrf detail'
#            'show ip pim vrf <vrf> detail'
# =============================================
class ShowIpPimVrfDetailSchema(MetaParser):
    """Schema for show ip pim vrf all detail"""

    schema = {
        'vrf': {
            Any(): {
                'address_family': {
                    Any(): {
                        # Sparse-mode / any-source-multicast settings.
                        Optional('sm'): {
                            Optional('asm'): {
                                Optional('register_source'): str,
                                Optional('register_source_address'): str,
                                # (S,G) expiry-timer configuration block.
                                Optional('sg_expiry_timer'): {
                                    Optional('sg_list'): str,
                                    Optional('infinity'): bool,
                                    Optional('sg_expiry_timer_configured'): bool,
                                    Optional('sg_expiry_timer'): int,
                                    Optional('config_version'): int,
                                    Optional('active_version'): int,
                                },
                            },
                        },
                        # Per-VRF identifiers from the summary row.
                        Optional('vrf_id'): int,
                        Optional('table_id'): str,
                        Optional('interface_count'): int,
                        Optional('bfd'): {
                            Optional('enable'): bool,
                        },
                        Optional('mvpn'): {
                            Optional('enable'): bool,
                        },
                        Optional('state_limit'): str,
                        Optional('register_rate_limit'): str,
                        Optional('cli_vrf_done'): bool,
                        Optional('cibtype_auto_enabled'): bool,
                        Optional('vxlan_vni_id'): int,
                        Optional('shared_tree_ranges'): str,
                        Optional('pre_build_spt'): str,
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ip pim vrf all detail
#
# ==========================================================
class ShowIpPimVrfDetail(ShowIpPimVrfDetailSchema):
    """Parser for:
        show ip pim vrf detail
        show ip pim vrf <vrf> detail"""

    cli_command = ['show ip pim vrf {vrf} detail', 'show ip pim vrf detail']

    def cli(self, vrf="", output=None):
        """Parse 'show ip pim vrf [<vrf>] detail'.

        Args:
            vrf (str): optional VRF name; empty runs the plain command.
            output (str): pre-collected CLI output; when None the command
                is executed on the device.

        Returns:
            dict: structure conforming to ShowIpPimVrfDetailSchema.
        """
        if output is None:
            if vrf:
                cmd = self.cli_command[0].format(vrf=vrf)
            else:
                cmd = self.cli_command[1]
            out = self.device.execute(cmd)
        else:
            out = output

        # This command only reports IPv4 information.
        af_name = 'ipv4'

        parsed_dict = dict()
        intf_name = ""
        # (S,G)-expiry state accumulates across several lines and is
        # reset each time a new VRF summary row is seen.
        expiry_timer_configured = sg_expiry_timer_infinity = False
        sg_expiry_timer = ""

        # Compile all line patterns once, before the line loop.

        # VRF Name   VRF  Table        Interface  BFD      MVPN
        #            ID   ID           Count      Enabled  Enabled
        # default    1    0x00000001   3          no       no
        #
        # Bug fix: the table-id class was [a_f0-9], a literal set that
        # matched only 'a', '_', 'f' and digits, rejecting hex digits
        # b-e; it is now the intended range [a-f0-9].
        p1 = re.compile(r'^\s*(?P<vrf>[\w\d]+) +(?P<vrf_id>\d+)'
                        r' +(?P<table_id>0x[a-f0-9]+) +(?P<interface_count>\d+)'
                        r' +(?P<bfd>\w+)? +(?P<mvpn>\w+)?$')

        # State Limit: None    (negative lookahead skips 'None' values)
        p2 = re.compile(r'^\s*State +Limit: +(?P<state_limit>(?!None)\w+)$')

        # Register Rate Limit: none
        p3 = re.compile(r'^\s*Register +Rate +Limit: +(?P<register_rate_limit>(?!none)\w+)$')

        # Register source interface : loopback0 address : 10.4.1.1
        p4 = re.compile(r'^\s*Register +source +interface +: +(?P<intf_name>[\w\d\S]+)'
                        r' +address +: +(?P<address>[\w\d\S]+)$')

        # Shared tree ranges: none
        p5 = re.compile(r'^\s*Shared +tree +ranges: +(?P<shared_tree_ranges>(?!none)\w+)$')

        # (S,G)-expiry timer: configured, infinity
        # (S,G)-expiry timer: configured, 1200 secs
        p7 = re.compile(r'^\s*\(S\,G\)\-expiry +timer: +(?P<expiry_timer_configured>\w+)'
                        r'(, +(?P<sg_expiry_timer_infinity>[\w]+))?'
                        r'(, +(?P<sg_expiry_timer>[\d]+) +secs)?$')

        # (S,G)-list policy: sg-expiry-timer-sg-list
        p8 = re.compile(r'^\s*\(S\,G\)\-list +policy: +(?P<sg_list_policy>(?!none)[\w\S]+)$')

        # (S,G)-expiry timer config version 1, active version 1
        p9 = re.compile(r'^\s*\(S\,G\)\-expiry +timer +config +version +(?P<expiry_timer_config_version>\d+)'
                        r', +active +version +(?P<expiry_timer_active_version>\d+)$')

        # Pre-build SPT for all (S,G)s in VRF: disabled
        p10 = re.compile(r'^\s*Pre\-build +SPT +for +all +\(S\,G\)s +in +VRF: +(?P<pre_build_spt>\w+)$')

        # CLI vrf done: TRUE
        p11 = re.compile(r'^\s*CLI +vrf +done: +(?P<cli_vrf_done>\w+)$')

        # PIM cibtype Auto Enabled: yes
        p12 = re.compile(r'^\s*PIM +cibtype +[aA]uto +[eE]nabled: +(?P<cibtype_auto_enabled>\w+)$')

        # PIM VxLAN VNI ID: 0
        p13 = re.compile(r'^\s*PIM +VxLAN +VNI +ID: +(?P<vxvlan_vni_id>\d+)$')

        for line in out.splitlines():
            line = line.rstrip()

            # default   1   0x00000001   3   no   no
            m = p1.match(line)
            if m:
                # New VRF row: reset per-VRF expiry-timer state.
                expiry_timer_configured = sg_expiry_timer_infinity = False
                sg_expiry_timer = ""
                vrf_name = m.groupdict()['vrf']
                vrf_id = int(m.groupdict()['vrf_id'])
                table_id = m.groupdict()['table_id']
                interface_count = int(m.groupdict()['interface_count'])
                bfd_enabled = m.groupdict()['bfd'].lower() == 'yes'
                mvpn_enabled = m.groupdict()['mvpn'].lower() == 'yes'

                af_dict = parsed_dict.setdefault('vrf', {})\
                                     .setdefault(vrf_name, {})\
                                     .setdefault('address_family', {})\
                                     .setdefault(af_name, {})
                af_dict['vrf_id'] = vrf_id
                af_dict['table_id'] = table_id
                af_dict['interface_count'] = interface_count
                af_dict['bfd'] = {'enable': bfd_enabled}
                af_dict['mvpn'] = {'enable': mvpn_enabled}
                continue

            # State Limit: None
            m = p2.match(line)
            if m:
                state_limit = m.groupdict()['state_limit'].lower()
                af_dict = parsed_dict['vrf'][vrf_name]\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})
                af_dict['state_limit'] = state_limit

            # Register Rate Limit: none
            m = p3.match(line)
            if m:
                register_rate_limit = m.groupdict()['register_rate_limit'].lower()
                af_dict = parsed_dict['vrf'][vrf_name]\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})
                af_dict['register_rate_limit'] = register_rate_limit

            # Register source interface : loopback0 address : 10.4.1.1
            m = p4.match(line)
            if m:
                register_source = m.groupdict()['intf_name']
                register_source_address = m.groupdict()['address']
                af_dict = parsed_dict['vrf'][vrf_name]\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})
                # Re-assert the ids captured from the summary row (p1).
                af_dict['vrf_id'] = vrf_id
                af_dict['table_id'] = table_id
                if register_source:
                    asm_dict = af_dict.setdefault('sm', {}).setdefault('asm', {})
                    asm_dict['register_source'] = register_source
                    asm_dict['register_source_address'] = register_source_address
                continue

            # Shared tree ranges: none
            m = p5.match(line)
            if m:
                shared_tree_ranges = m.groupdict()['shared_tree_ranges']
                af_dict = parsed_dict['vrf'][vrf_name]\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})
                af_dict['shared_tree_ranges'] = shared_tree_ranges

            # (S,G)-expiry timer: configured, infinity
            # (S,G)-expiry timer: configured, 1200 secs
            m = p7.match(line)
            if m:
                if m.groupdict()['expiry_timer_configured']:
                    expiry_timer_configured = \
                        m.groupdict()['expiry_timer_configured'] == 'configured'
                if m.groupdict()['sg_expiry_timer_infinity']:
                    sg_expiry_timer_infinity = \
                        m.groupdict()['sg_expiry_timer_infinity'].lower() == 'infinity'
                if m.groupdict()['sg_expiry_timer']:
                    sg_expiry_timer = m.groupdict()['sg_expiry_timer']
                timer_dict = parsed_dict['vrf'][vrf_name]\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})\
                    .setdefault('sm', {})\
                    .setdefault('asm', {})\
                    .setdefault('sg_expiry_timer', {})
                if sg_expiry_timer_infinity:
                    timer_dict['infinity'] = sg_expiry_timer_infinity
                if expiry_timer_configured:
                    timer_dict['sg_expiry_timer_configured'] = expiry_timer_configured
                if sg_expiry_timer:
                    timer_dict['sg_expiry_timer'] = int(sg_expiry_timer)
                continue

            # (S,G)-list policy: sg-expiry-timer-sg-list
            m = p8.match(line)
            if m:
                sg_list_policy = m.groupdict()['sg_list_policy']
                timer_dict = parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    .setdefault('sm', {})\
                    .setdefault('asm', {})\
                    .setdefault('sg_expiry_timer', {})
                timer_dict['sg_list'] = sg_list_policy
                continue

            # (S,G)-expiry timer config version 1, active version 1
            m = p9.match(line)
            if m:
                timer_dict = parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    .setdefault('sm', {})\
                    .setdefault('asm', {})\
                    .setdefault('sg_expiry_timer', {})
                timer_dict['active_version'] = int(m.groupdict()['expiry_timer_active_version'])
                timer_dict['config_version'] = int(m.groupdict()['expiry_timer_config_version'])
                continue

            # Pre-build SPT for all (S,G)s in VRF: disabled
            m = p10.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['pre_build_spt'] = m.groupdict()['pre_build_spt']
                continue

            # CLI vrf done: TRUE
            m = p11.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['cli_vrf_done'] = m.groupdict()['cli_vrf_done'].lower() == 'true'
                continue

            # PIM cibtype Auto Enabled: yes
            m = p12.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['cibtype_auto_enabled'] = \
                    m.groupdict()['cibtype_auto_enabled'].lower() == 'yes'
                continue

            # PIM VxLAN VNI ID: 0
            m = p13.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['vxlan_vni_id'] = int(m.groupdict()['vxvlan_vni_id'])
                continue

        return parsed_dict
# ==================================================
# schema Parser for 'show ip pim group-range'
# ==================================================
class ShowIpPimGroupRangeSchema(MetaParser):
    '''Schema for show ip pim group-range'''

    schema = {
        'vrf': {
            Any(): {
                'address_family': {
                    Any(): {
                        'sm': {
                            # Keyed by PIM mode string (e.g. 'ssm').
                            Any(): {
                                # Keyed by the group-range prefix.
                                Any(): {
                                    Optional('action'): str,
                                    Optional('mode'): str,
                                    Optional('rp_address'): str,
                                    Optional('shared_tree_only'): str,
                                    Optional('range'): str,
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ip pim group-range
# parser for show ip pim group-range vrf <vrf>
# ==========================================================
class ShowIpPimGroupRange(ShowIpPimGroupRangeSchema):
    """parser for:
        show ip pim group-range
        show ip pim group-range vrf <vrf>"""

    cli_command = ['show ip pim group-range', 'show ip pim group-range vrf {vrf}']

    def cli(self, vrf="", output=None):
        """Parse 'show ip pim group-range [vrf <vrf>]'.

        Args:
            vrf (str): optional VRF name.
            output (str): pre-collected CLI output; when None the command
                is executed on the device.

        Returns:
            dict: structure conforming to ShowIpPimGroupRangeSchema.
        """
        if output is None:
            if not vrf:
                cmd = self.cli_command[0]
            else:
                cmd = self.cli_command[1].format(vrf=vrf)
            # Fix: the original also had a dead `out = output` assignment
            # in an else branch; the loop below always consumes `output`.
            output = self.device.execute(cmd)

        # Only IPv4 information is reported by this command.
        af_name = 'ipv4'

        parsed_output = dict()
        vrf_name = ""

        # PIM Group-Range Configuration for VRF "VRF1"
        p1 = re.compile(r'^\s*PIM +Group\-Range +Configuration +for +VRF +\"(?P<vrf_name>[\S]+)\"$')

        # Group-range        Action    Mode   RP-address   Shared-tree-only range
        # 232.0.0.0/8        Accept    SSM    -            -                Local
        # The leading character class rejects the column-header line
        # (which starts with 'G').
        p2 = re.compile(r'^\s*(?P<group_range>[^\s*Group-range][\S]+)'
                        r' +(?P<action>[\S]+)'
                        r' +(?P<mode>[\S]+)'
                        r' +(?P<rp_address>[\S]+)'
                        r' +(?P<shared_tree_only>[\S]+)'
                        r'( +(?P<range>[\S]+))?$')

        for line in output.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                continue

            m = p2.match(line)
            if m:
                group_range = m.groupdict()['group_range']
                action = m.groupdict()['action']
                mode = m.groupdict()['mode'].lower()
                rp_address = m.groupdict()['rp_address']
                shared_tree_only = m.groupdict()['shared_tree_only']
                # Fix: renamed from 'range', which shadowed the builtin.
                range_val = m.groupdict()['range']
                if group_range and vrf_name and mode:
                    mode_dict = parsed_output.setdefault('vrf', {})\
                                             .setdefault(vrf_name, {})\
                                             .setdefault('address_family', {})\
                                             .setdefault(af_name, {})\
                                             .setdefault('sm', {})\
                                             .setdefault(mode, {})
                    # Each group-range entry is (re)created fresh.
                    range_dict = mode_dict[group_range] = {}
                    # A '-' marks a not-applicable column on the device.
                    if '-' not in action:
                        range_dict['action'] = action.lower()
                    range_dict['mode'] = mode
                    if '-' not in rp_address:
                        range_dict['rp_address'] = rp_address
                    if '-' not in shared_tree_only:
                        range_dict['shared_tree_only'] = shared_tree_only
                    if range_val and '-' not in range_val:
                        range_dict['range'] = range_val.lower()
                continue

        return parsed_output
# =========================================================================
# schema for 'show ip pim policy statistics register-policy'
# =========================================================================
class ShowIpPimPolicyStaticticsRegisterPolicySchema(MetaParser):
    """Schema for show ip pim policy statistics register-policy"""

    schema = {
        'vrf': {
            Any(): {
                'address_family': {
                    Any(): {
                        Optional('sm'): {
                            Optional('asm'): {
                                Optional('accept_register'): str,
                                Optional('register_policy'): {
                                    # Keyed by route-map / prefix-list name.
                                    Any(): {
                                        Optional('total_accept_count'): int,
                                        Optional('total_reject_count'): int,
                                        # Keyed by the match/permit
                                        # statement text.
                                        Any(): {
                                            Optional('compare_count'): int,
                                            Optional('match_count'): int,
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# =======================================================================
# parser for show ip pim policy statistics register-policy
# parser for show ip pim policy statistics register-policy vrf <vrf>
#
# =======================================================================
class ShowIpPimPolicyStaticticsRegisterPolicy(ShowIpPimPolicyStaticticsRegisterPolicySchema):
    """Parser for:
        show ip pim policy statistics register-policy
        show ip pim policy statistics register-policy vrf <vrf>"""

    cli_command = ['show ip pim policy statistics register-policy','show ip pim policy statistics register-policy vrf {vrf}']

    def cli(self, vrf=""):
        # NOTE(review): unlike the sibling parsers in this file, cli()
        # takes no output= argument, so it always talks to a live
        # device -- confirm whether an output passthrough should be added.
        #
        # Pass 1: scrape the running config to learn which
        # register-policy name is configured under each VRF context.
        cmd_vrf = "sh run | egrep '^vrf|register-policy'"
        vrf_out = self.device.execute(cmd_vrf)
        parsed_data = {}
        # Config lines seen before any 'vrf context' line belong to the
        # default VRF.
        vrf_name = 'default'
        for each_line in vrf_out.splitlines():
            each_line = each_line.rstrip()

            # ip pim register-policy pim_register_p
            p1 = re.compile(r'^\s*ip +pim +register-policy( +prefix-list)? +(?P<rg_policy_name>[\S]+)$')
            m = p1.match(each_line)
            if m:
                # Map the current vrf context to its policy name.
                parsed_data[vrf_name] = m.groupdict()['rg_policy_name']
                continue

            # vrf context VRF1
            p1 = re.compile(r'^\s*vrf +context +(?P<vrf_name>[\S]+)$')
            m = p1.match(each_line)
            if m:
                vrf_name = m.groupdict()['vrf_name']
                continue

        # Pass 2: run the statistics command itself.
        if not vrf:
            cmd = self.cli_command[0]
        else:
            cmd = self.cli_command[1].format(vrf=vrf)
        out = self.device.execute(cmd)

        af_name = 'ipv4'
        # Per-policy state carried across lines; it is folded into
        # parsed_dict at the bottom of the loop on every iteration.
        compare_count = match_count = accept_register = info =\
            reject_policy_count = accept_policy_count = ""
        # Init dictionary
        parsed_dict = dict()
        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue

            # Legend line ("C: No. of comparisions, M: ..."); used as a
            # marker that a new policy section starts, so reset all
            # per-policy state.
            p0 = re.compile(r'^\s*C: +No. +of +comparisions, +M: +No. +of matches$')
            m = p0.match(line)
            if m:
                compare_count = match_count = accept_register = info = \
                    reject_policy_count = accept_policy_count = ""

            # route-map pim_register_vrf permit 10
            p1 = re.compile(r'^\s*route-map +(?P<route_map>[\S]+)'
                            ' +permit +(?P<route_map_permit>[\S]+)$')
            m = p1.match(line)
            if m:
                accept_register = m.groupdict()['route_map']
                route_map_permit = m.groupdict()['route_map_permit']
                # Resolve the owning VRF from the pass-1 config map.
                # NOTE(review): this rebinds the 'vrf' parameter.
                for key, value in parsed_data.items():
                    if accept_register == value:
                        vrf = key

            # match ip multicast group 239.2.2.2/32 C: 0 M: 0
            p1_1 = re.compile(r'^\s*match +ip +multicast +group +(?P<group>[\w\.\/]+)'
                              '( +[c|C]: +(?P<c>\d+))?'
                              '( +[m|M]: +(?P<m>\d+))?$')
            m = p1_1.match(line)
            if m:
                group = m.groupdict()['group'].rstrip()
                if m.groupdict()['c']:
                    compare_count = m.groupdict()['c']
                else:
                    compare_count = ""
                match_count = m.groupdict()['m']
                # The statement text itself is the dictionary key below.
                info = "match ip multicast group " + group

            # ip prefix-list testprefixlist seq 5 permit 239.3.3.3/32 M: 0
            p2_1 = re.compile(r'^\s*ip +prefix-list +(?P<prefix_list>[\S]+)'
                              '( +seq +(?P<seq>\d+))?'
                              '( +permit +(?P<permit>[\S]+))?'
                              '( +[c|C]: +(?P<c>\d+))?'
                              '( +[m|M]: +(?P<m>\d+))?$')
            m = p2_1.match(line)
            if m:
                accept_register = m.groupdict()['prefix_list']
                for key, value in parsed_data.items():
                    if accept_register == value:
                        vrf = key
                seq = m.groupdict()['seq']
                permit = m.groupdict()['permit']
                if m.groupdict()['c']:
                    compare_count = m.groupdict()['c']
                else:
                    compare_count = ""
                match_count = m.groupdict()['m']
                # NOTE(review): raises TypeError if 'seq' or 'permit' is
                # absent (both regex groups are optional) -- confirm the
                # device output always includes them.
                info = 'ip prefix-list '+ accept_register +" seq "+ seq + " permit "+permit

            # Total accept count for policy: 0
            p3 = re.compile(r'^\s*Total +accept +count +for +policy: +(?P<accept_policy_count>\d+)$')
            m = p3.match(line)
            if m:
                accept_policy_count = m.groupdict()['accept_policy_count']

            # Total reject count for policy: 0
            p4 = re.compile(r'^\s*Total +reject +count +for +policy: +(?P<reject_policy_count>\d+)$')
            m = p4.match(line)
            if m:
                reject_policy_count = m.groupdict()['reject_policy_count']

            # Fold the accumulated state into the schema structure. This
            # runs on every line once a policy name is known, so later
            # counter lines keep updating the same entry in place.
            if vrf and accept_register:
                if 'vrf' not in parsed_dict:
                    parsed_dict['vrf'] = {}
                if vrf not in parsed_dict['vrf']:
                    parsed_dict['vrf'][vrf] = {}
                if 'address_family' not in parsed_dict['vrf'][vrf]:
                    parsed_dict['vrf'][vrf]['address_family'] = {}
                if af_name not in parsed_dict['vrf'][vrf]['address_family']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name] = {}
                if 'sm' not in parsed_dict['vrf'][vrf]['address_family'][af_name]:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm'] = {}
                if 'asm' not in parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm'] = {}
                parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['accept_register'] = accept_register
                if 'register_policy' not in parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy'] = {}
                if accept_register not in parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']\
                        ['asm']['register_policy']:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy']\
                        [accept_register] = {}
                if reject_policy_count:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy'][accept_register] \
                        ['total_reject_count'] = int(reject_policy_count)
                if accept_policy_count:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy'][accept_register] \
                        ['total_accept_count'] = int(accept_policy_count)
                if info and info not in parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm'] \
                        ['register_policy'][accept_register]:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy'] \
                        [accept_register][info] = {}
                if info and compare_count:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy'][accept_register] \
                        [info]['compare_count'] = int(compare_count)
                if info and match_count:
                    parsed_dict['vrf'][vrf]['address_family'][af_name]['sm']['asm']['register_policy'][accept_register] \
                        [info]['match_count'] = int(match_count)
        return parsed_dict
# ====================================================
# schema Parser for 'show ip pim interface'
# ====================================================
class ShowIpPimInterfaceSchema(MetaParser):
    """Schema for show ip pim interface"""

    schema = {
        'vrf': {
            Any(): {
                'interfaces': {
                    Any(): {
                        'address_family': {
                            Any(): {
                                # Interface and protocol state.
                                Optional('oper_status'): str,
                                Optional('link_status'): str,
                                Optional('admin_status'): str,
                                Optional('address'): list,
                                Optional('ip_subnet'): str,
                                # Designated-router details.
                                Optional('dr_address'): str,
                                Optional('dr_priority'): int,
                                Optional('configured_dr_priority'): int,
                                Optional('neighbor_count'): int,
                                # Hello / join-prune timers and policies.
                                Optional('hello_interval'): int,
                                Optional('hello_expiration'): str,
                                Optional('neighbor_holdtime'): int,
                                Optional('dr_delay'): int,
                                Optional('bsr_border'): bool,
                                Optional('genid'): str,
                                Optional('hello_md5_ah_authentication'): str,
                                Optional('neighbor_filter'): str,
                                Optional('jp_inbound_policy'): str,
                                Optional('jp_outbound_policy'): str,
                                Optional('jp_interval'): int,
                                Optional('jp_next_sending'): int,
                                Optional('bfd'): {
                                    Optional('enable'): bool,
                                },
                                Optional('sm'): {
                                    Optional('passive'): bool,
                                },
                                Optional('vpc_svi'): bool,
                                Optional('auto_enabled'): bool,
                                # Packet counters.
                                Optional('statistics'): {
                                    Optional('general'): {
                                        Optional('hellos'): str,
                                        Optional('jps'): str,
                                        Optional('asserts'): str,
                                        Optional('grafts'): str,
                                        Optional('graft_acks'): str,
                                        Optional('df_offers'): str,
                                        Optional('df_winners'): str,
                                        Optional('df_backoffs'): str,
                                        Optional('df_passes'): str,
                                    },
                                    Optional('errors'): {
                                        Optional('checksum'): int,
                                        Optional('invalid_packet_types'): int,
                                        Optional('invalid_df_subtypes'): int,
                                        Optional('authentication_failed'): int,
                                        Optional('packet_length_errors'): int,
                                        Optional('bad_version_packets'): int,
                                        Optional('packets_from_self'): int,
                                        Optional('packets_from_non_neighbors'): int,
                                        Optional('packets_received_on_passiveinterface'): int,
                                        Optional('jps_received_on_rpf_interface'): int,
                                        Optional('joins_received_with_no_rp'): int,
                                        Optional('joins_received_with_wrong_rp'): int,
                                        Optional('joins_received_with_ssm_groups'): int,
                                        Optional('joins_received_with_bidir_groups'): int,
                                        Optional('jps_filtered_by_inbound_policy'): int,
                                        Optional('jps_filtered_by_outbound_policy'): int,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ip pim interface vrf <word>
# parser for show ip pim interface
# parser for show ip pim interface <word>
# parser for show ip pim interface <word1> vrf <word2>
#
# ==========================================================
class ShowIpPimInterface(ShowIpPimInterfaceSchema):
    """Parser for:
        show ip pim interface
        show ip pim interface vrf <vrf>
        show ip pim interface <interface>
        show ip pim interface <interface> vrf <vrf>"""

    cli_command = ['show ip pim interface',
                   'show ip pim interface {interface}',
                   'show ip pim interface vrf {vrf}',
                   'show ip pim interface {interface} vrf {vrf}']

    def cli(self, interface="", vrf="", output=None):
        """Parse the appropriate 'show ip pim interface' variant.

        Args:
            interface: optional interface name to scope the command
            vrf: optional VRF name to scope the command
            output: pre-collected device output; when None the command
                is executed on the device

        Returns:
            dict conforming to ShowIpPimInterfaceSchema
        """
        if output is None:
            # Pick the CLI variant matching the supplied arguments
            if not vrf and not interface:
                cmd = self.cli_command[0]
            elif interface and not vrf:
                cmd = self.cli_command[1].format(interface=interface)
            elif vrf and not interface:
                cmd = self.cli_command[2].format(vrf=vrf)
            else:
                cmd = self.cli_command[3].format(interface=interface, vrf=vrf)
            out = self.device.execute(cmd)
        else:
            out = output

        af_name = 'ipv4'
        # Init dictionary
        parsed_dict = dict()

        # Compile every line pattern once, outside the per-line loop.

        # PIM Interface Status for VRF "VRF1"
        p1 = re.compile(r'^\s*PIM +Interface +Status +for +VRF+ \"(?P<vrf>[\w]+)\"$')
        # Ethernet2/2, Interface status: protocol-up/link-up/admin-up
        p2 = re.compile(r'^\s*(?P<interface_name>[\w\/\.\-]+),?'
                        r' +Interface +status:'
                        r' +protocol-(?P<oper_status>[\w]+)(/)?'
                        r'link\-(?P<link_status>[\w]+)(/)?'
                        r'admin\-(?P<admin_status>[\w]+)$')
        # IP address: 10.11.33.11, IP subnet: 10.11.33.0/24
        p3 = re.compile(r'^\s*IP +address: +(?P<address>[\w\.]+),'
                        r' +IP +subnet: +(?P<ip_subnet>[\w\.\/]+)$')
        # PIM DR: 10.11.33.11, DR's priority: 144
        p4 = re.compile(r'^\s*PIM +DR: +(?P<dr_address>[\w\.]+),'
                        r' +DR\'s +priority: +(?P<dr_priority>[\d]+)$')
        # PIM neighbor count: 1
        p5 = re.compile(r'^\s*PIM +neighbor +count: +(?P<nbr_count>[\d]+)$')
        # PIM hello interval: 45 secs (configured 44444 ms), next hello sent in: 00:00:05
        p6 = re.compile(r'^\s*PIM +hello +interval: +(?P<hello_interval>[\d]+) +secs'
                        r'( +\(configured +(?P<configured_interval_ms>\d+) +ms\))?,'
                        r' +next +hello +sent +in: +(?P<hello_expiration>[\w\:]+)$')
        # PIM neighbor holdtime: 159 secs
        p7 = re.compile(r'^\s*PIM +neighbor +holdtime: +(?P<holdtime>[\d]+) +secs$')
        # PIM configured DR priority: 144
        p8 = re.compile(r'^\s*PIM +configured +DR +priority: +(?P<configured_dr_priority>[\d]+)$')
        # PIM configured DR delay: 3 secs
        p9 = re.compile(r'^\s*PIM +configured +DR +delay: +(?P<configured_dr_delay>[\d]+) +secs$')
        # PIM border interface: yes
        p10 = re.compile(r'^\s*PIM +border +interface: +(?P<border_interface>[\w]+)$')
        # PIM GenID sent in Hellos: 0x26fae674
        p11 = re.compile(r'^\s*PIM +GenID +sent +in +Hellos: +(?P<genid>[\S]+)$')
        # PIM Hello MD5-AH Authentication: disabled
        p12 = re.compile(r'^\s*PIM +Hello +MD5-AH +Authentication: +(?P<md5_authentication>[\w]+)$')
        # PIM Neighbor policy: v4neighbor-policy
        p13 = re.compile(r'^\s*PIM +Neighbor +policy: +(?P<nbr_policy>(?!none)[\w\-\s]+)$')
        # PIM Join-Prune inbound policy: v4jp-policy
        p14 = re.compile(r'^\s*PIM +Join-Prune +inbound +policy: +(?P<jp_inbound_policy>(?!none)[\w\-\s]+)$')
        # PIM Join-Prune outbound policy: v4jp-policy
        p15 = re.compile(r'^\s*PIM +Join-Prune +outbound +policy: +(?P<jp_outbound_policy>(?!none)[\w\-\s]+)$')
        # PIM Join-Prune interval: 1 minutes
        p16 = re.compile(r'^\s*PIM +Join-Prune +interval: +(?P<jp_interval>[\d]+) +minutes$')
        # PIM Join-Prune next sending: 1 minutes
        p17 = re.compile(r'^\s*PIM +Join-Prune +next +sending: +(?P<jp_next_sending>[\d]+) +minutes$')
        # PIM BFD enabled: no
        p18 = re.compile(r'^\s*PIM +BFD +enabled: +(?P<bfd_enabled>[\w]+)$')
        # PIM passive interface: no
        p19 = re.compile(r'^\s*PIM +passive +interface: +(?P<passive>[\w]+)$')
        # PIM VPC SVI: no
        p20 = re.compile(r'^\s*PIM +VPC +SVI: +(?P<vpc_svi>[\w]+)$')
        # PIM Auto Enabled: no
        p21 = re.compile(r'^\s*PIM +Auto +Enabled: +(?P<auto_enabled>[\w]+)$')
        # PIM Interface Statistics, last reset: never
        # PIM Interface Statistics
        p22 = re.compile(r'^\s*PIM +Interface +Statistics+(, +last +reset: +(?P<last_reset>[\w\:]+))?$')
        # Hellos: 360/474 (early: 0), JPs: 0/0, Asserts: 0/0
        p23 = re.compile(r'^\s*Hellos: +(?P<hellos>[\d\/]+)'
                         r' +\(early: +(?P<early>\d+)\)?,'
                         r' +JPs: +(?P<jps>[\d\/]+),'
                         r' +Asserts: +(?P<asserts>[\d\/]+)$')
        # Grafts: 0/0, Graft-Acks: 0/0
        p24 = re.compile(r'^\s*Grafts: +(?P<grafts>[\d\/]+),'
                         r' +Graft-Acks: +(?P<graft_acks>[\d\/]+)$')
        # DF-Offers: 0/0, DF-Winners: 0/0, DF-Backoffs: 0/0, DF-Passes: 0/0
        p25 = re.compile(r'^\s*DF-Offers: +(?P<df_offers>[\d\/]+),'
                         r' +DF-Winners: +(?P<df_winners>[\d\/]+),'
                         r' +DF-Backoffs: +(?P<df_backoffs>[\d\/]+),'
                         r' +DF-Passes: +(?P<df_passes>[\d\/]+)$')
        # Checksum errors: 0, Invalid packet types/DF subtypes: 0/0
        p26 = re.compile(r'^\s*Checksum errors: +(?P<checksum>[\d]+),'
                         r' +Invalid +packet +types\/DF +subtypes:'
                         r' +(?P<invalid_packet_types>[\d]+)/(?P<invalid_df_subtypes>[\d]+)$')
        # Authentication failed: 0
        p27 = re.compile(r'^\s*Authentication +failed: +(?P<authentication_failed>[\d]+)$')
        # Packet length errors: 0, Bad version packets: 0, Packets from self: 0
        p28 = re.compile(r'^\s*Packet +length +errors: +(?P<packet_length_errors>[\d]+),'
                         r' +Bad +version +packets: +(?P<bad_version_packets>[\d]+),'
                         r' +Packets +from +self: +(?P<packets_from_self>[\d]+)$')
        # Packets from non-neighbors: 0
        p29 = re.compile(r'^\s*Packets +from +non\-neighbors: +(?P<packets_from_non_neighbors>[\d]+)$')
        # Packets received on passiveinterface: 0
        p30 = re.compile(r'^\s*Packets +received +on +passiveinterface:'
                         r' +(?P<packets_received_on_passiveinterface>[\d]+)$')
        # JPs received on RPF-interface: 0
        p31 = re.compile(r'^\s*JPs +received +on +RPF\-interface:'
                         r' +(?P<jp_received_on_rpf_interface>[\d]+)$')
        # (*,G) Joins received with no/wrong RP: 0/0
        p32 = re.compile(r'^\s*(?P<s_g>[\w\(\)\,\*]+) +Joins +received +with +no\/+wrong RP:'
                         r' +(?P<joins_received_with_no_rp>\d+)/(?P<joins_received_with_wrong_rp>\d+)$')
        # (*,G)/(S,G) JPs received for SSM/Bidir groups: 0/0
        p33 = re.compile(r'^\s*(?P<s_g>[\w\(\)\,\*\/]+) +JPs +received +for +SSM\/Bidir +groups:'
                         r' +(?P<joins_received_with_ssm_groups>\d+)/(?P<joins_received_with_bidir_groups>\d+)$')
        # JPs filtered by inbound policy: 0
        p34 = re.compile(r'^\s*JPs +filtered +by +inbound +policy:'
                         r' +(?P<jps_filtered_by_inbound_policy>\d+)$')
        # JPs filtered by outbound policy: 0
        p35 = re.compile(r'^\s*JPs +filtered +by +outbound +policy:'
                         r' +(?P<jps_filtered_by_outbound_policy>\d+)$')

        # Per-interface parse state; empty string means "not seen yet".
        # NOTE: configured_dr_priority was missing from this initial reset
        # in the original code, causing a NameError for outputs that carry
        # interface data without a 'PIM Interface Status for VRF' header.
        checksum = invalid_packet_types = invalid_df_subtypes = \
            authentication_failed = packet_length_errors = \
            bad_version_packets = packets_from_self = \
            packets_from_non_neighbors = \
            packets_received_on_passiveinterface = \
            jps_received_on_rpf_interface = \
            joins_received_with_bidir_groups = joins_received_with_no_rp = \
            joins_received_with_ssm_groups = joins_received_with_wrong_rp = \
            jps_filtered_by_inbound_policy = \
            jps_filtered_by_outbound_policy = hellos = jps = asserts = \
            grafts = graft_acks = df_backoffs = df_passes = df_winners = \
            df_offers = ""
        interface_name = address = ip_subnet = dr_address = dr_priority = \
            configured_dr_priority = configured_dr_delay = nbr_count = \
            hello_interval = hello_expiration = neighbor_holdtime = \
            bsr_border = genid = hello_md5_ah_authentication = \
            neighbor_filter = jp_inbound_policy = jp_outbound_policy = \
            jp_interval = jp_next_sending = bfd = passive = vpc_svi = \
            auto_enabled = ""

        for line in out.splitlines():
            line = line.rstrip()

            # PIM Interface Status for VRF "VRF1"
            m = p1.match(line)
            if m:
                vrf = m.groupdict()['vrf']
                # New VRF section: clear all per-interface state so values
                # from a previous interface cannot leak into the next one.
                checksum = invalid_packet_types = invalid_df_subtypes = \
                    authentication_failed = packet_length_errors = \
                    bad_version_packets = packets_from_self = \
                    packets_from_non_neighbors = \
                    packets_received_on_passiveinterface = \
                    jps_received_on_rpf_interface = \
                    joins_received_with_bidir_groups = joins_received_with_no_rp = \
                    joins_received_with_ssm_groups = joins_received_with_wrong_rp = \
                    jps_filtered_by_inbound_policy = \
                    jps_filtered_by_outbound_policy = hellos = jps = asserts = \
                    grafts = graft_acks = df_backoffs = df_passes = df_winners = \
                    df_offers = ""
                interface_name = address = ip_subnet = dr_address = dr_priority = \
                    configured_dr_priority = configured_dr_delay = nbr_count = \
                    hello_interval = hello_expiration = neighbor_holdtime = \
                    bsr_border = genid = hello_md5_ah_authentication = \
                    neighbor_filter = jp_inbound_policy = jp_outbound_policy = \
                    jp_interval = jp_next_sending = bfd = passive = vpc_svi = \
                    auto_enabled = ""

            # Ethernet2/2, Interface status: protocol-up/link-up/admin-up
            m = p2.match(line)
            if m:
                group = m.groupdict()
                interface_name = group['interface_name']
                oper_status = group['oper_status']
                link_status = group['link_status']
                admin_status = group['admin_status']
                # Policies and filters are per interface; clear any value
                # inherited from the previous interface block.
                jp_outbound_policy = jp_inbound_policy = neighbor_filter = ""

            # IP address: 10.11.33.11, IP subnet: 10.11.33.0/24
            m = p3.match(line)
            if m:
                address = m.groupdict()['address']
                ip_subnet = m.groupdict()['ip_subnet']

            # PIM DR: 10.11.33.11, DR's priority: 144
            m = p4.match(line)
            if m:
                dr_address = m.groupdict()['dr_address']
                dr_priority = m.groupdict()['dr_priority']

            # PIM neighbor count: 1
            m = p5.match(line)
            if m:
                nbr_count = m.groupdict()['nbr_count']

            # PIM hello interval: 45 secs (configured 44444 ms), next hello sent in: 00:00:05
            m = p6.match(line)
            if m:
                hello_interval = m.groupdict()['hello_interval']
                hello_expiration = m.groupdict()['hello_expiration']

            # PIM neighbor holdtime: 159 secs
            m = p7.match(line)
            if m:
                neighbor_holdtime = m.groupdict()['holdtime']

            # PIM configured DR priority: 144
            m = p8.match(line)
            if m:
                configured_dr_priority = m.groupdict()['configured_dr_priority']

            # PIM configured DR delay: 3 secs
            m = p9.match(line)
            if m:
                configured_dr_delay = m.groupdict()['configured_dr_delay']

            # PIM border interface: yes
            m = p10.match(line)
            if m:
                bsr_border = m.groupdict()['border_interface']

            # PIM GenID sent in Hellos: 0x26fae674
            m = p11.match(line)
            if m:
                genid = m.groupdict()['genid']

            # PIM Hello MD5-AH Authentication: disabled
            m = p12.match(line)
            if m:
                hello_md5_ah_authentication = m.groupdict()['md5_authentication']

            # PIM Neighbor policy: v4neighbor-policy
            m = p13.match(line)
            if m:
                neighbor_filter = m.groupdict()['nbr_policy']

            # PIM Join-Prune inbound policy: v4jp-policy
            m = p14.match(line)
            if m:
                jp_inbound_policy = m.groupdict()['jp_inbound_policy']

            # PIM Join-Prune outbound policy: v4jp-policy
            m = p15.match(line)
            if m:
                jp_outbound_policy = m.groupdict()['jp_outbound_policy']

            # PIM Join-Prune interval: 1 minutes
            m = p16.match(line)
            if m:
                jp_interval = m.groupdict()['jp_interval']

            # PIM Join-Prune next sending: 1 minutes
            m = p17.match(line)
            if m:
                jp_next_sending = m.groupdict()['jp_next_sending']

            # PIM BFD enabled: no
            m = p18.match(line)
            if m:
                bfd = m.groupdict()['bfd_enabled']

            # PIM passive interface: no
            m = p19.match(line)
            if m:
                passive = m.groupdict()['passive']

            # PIM VPC SVI: no
            m = p20.match(line)
            if m:
                vpc_svi = m.groupdict()['vpc_svi']

            # PIM Auto Enabled: no
            m = p21.match(line)
            if m:
                auto_enabled = m.groupdict()['auto_enabled']

            # PIM Interface Statistics, last reset: never
            # PIM Interface Statistics
            m = p22.match(line)
            if m:
                if m.groupdict()['last_reset']:
                    # Captured for completeness; not part of the schema today.
                    last_reset = m.groupdict()['last_reset']

            # Hellos: 360/474 (early: 0), JPs: 0/0, Asserts: 0/0
            m = p23.match(line)
            if m:
                hellos = m.groupdict()['hellos']
                jps = m.groupdict()['jps']
                asserts = m.groupdict()['asserts']

            # Grafts: 0/0, Graft-Acks: 0/0
            m = p24.match(line)
            if m:
                grafts = m.groupdict()['grafts']
                graft_acks = m.groupdict()['graft_acks']

            # DF-Offers: 0/0, DF-Winners: 0/0, DF-Backoffs: 0/0, DF-Passes: 0/0
            m = p25.match(line)
            if m:
                df_offers = m.groupdict()['df_offers']
                df_winners = m.groupdict()['df_winners']
                df_backoffs = m.groupdict()['df_backoffs']
                df_passes = m.groupdict()['df_passes']

            # Checksum errors: 0, Invalid packet types/DF subtypes: 0/0
            m = p26.match(line)
            if m:
                checksum = m.groupdict()['checksum']
                invalid_packet_types = m.groupdict()['invalid_packet_types']
                invalid_df_subtypes = m.groupdict()['invalid_df_subtypes']

            # Authentication failed: 0
            m = p27.match(line)
            if m:
                authentication_failed = m.groupdict()['authentication_failed']

            # Packet length errors: 0, Bad version packets: 0, Packets from self: 0
            m = p28.match(line)
            if m:
                packet_length_errors = m.groupdict()['packet_length_errors']
                bad_version_packets = m.groupdict()['bad_version_packets']
                packets_from_self = m.groupdict()['packets_from_self']

            # Packets from non-neighbors: 0
            m = p29.match(line)
            if m:
                packets_from_non_neighbors = m.groupdict()['packets_from_non_neighbors']

            # Packets received on passiveinterface: 0
            m = p30.match(line)
            if m:
                packets_received_on_passiveinterface = m.groupdict()['packets_received_on_passiveinterface']

            # JPs received on RPF-interface: 0
            m = p31.match(line)
            if m:
                jps_received_on_rpf_interface = m.groupdict()['jp_received_on_rpf_interface']

            # (*,G) Joins received with no/wrong RP: 0/0
            m = p32.match(line)
            if m:
                joins_received_with_no_rp = m.groupdict()['joins_received_with_no_rp']
                joins_received_with_wrong_rp = m.groupdict()['joins_received_with_wrong_rp']

            # (*,G)/(S,G) JPs received for SSM/Bidir groups: 0/0
            m = p33.match(line)
            if m:
                joins_received_with_ssm_groups = m.groupdict()['joins_received_with_ssm_groups']
                joins_received_with_bidir_groups = m.groupdict()['joins_received_with_bidir_groups']

            # JPs filtered by inbound policy: 0
            m = p34.match(line)
            if m:
                jps_filtered_by_inbound_policy = m.groupdict()['jps_filtered_by_inbound_policy']

            # JPs filtered by outbound policy: 0
            m = p35.match(line)
            if m:
                jps_filtered_by_outbound_policy = m.groupdict()['jps_filtered_by_outbound_policy']

            # Commit whatever state has been gathered so far for the current
            # vrf/interface pair.  This runs on every line (as in the original
            # implementation) so partially-filled sections are still recorded.
            if vrf and interface_name:
                af_dict = parsed_dict.setdefault('vrf', {})\
                    .setdefault(vrf, {})\
                    .setdefault('interfaces', {})\
                    .setdefault(interface_name, {})\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})

                if oper_status:
                    af_dict['oper_status'] = oper_status
                if link_status:
                    af_dict['link_status'] = link_status
                if admin_status:
                    af_dict['admin_status'] = admin_status
                if address:
                    # Schema expects a list of addresses.
                    af_dict['address'] = address.split()
                if ip_subnet:
                    af_dict['ip_subnet'] = ip_subnet
                if dr_address:
                    af_dict['dr_address'] = dr_address
                if dr_priority:
                    af_dict['dr_priority'] = int(dr_priority)
                if configured_dr_priority:
                    af_dict['configured_dr_priority'] = int(configured_dr_priority)
                if configured_dr_delay:
                    af_dict['dr_delay'] = int(configured_dr_delay)
                if nbr_count:
                    af_dict['neighbor_count'] = int(nbr_count)
                if hello_interval:
                    af_dict['hello_interval'] = int(hello_interval)
                if hello_expiration:
                    af_dict['hello_expiration'] = hello_expiration
                if neighbor_holdtime:
                    af_dict['neighbor_holdtime'] = int(neighbor_holdtime)
                if bsr_border:
                    af_dict['bsr_border'] = bsr_border.lower() == 'yes'
                if genid:
                    af_dict['genid'] = genid
                if hello_md5_ah_authentication:
                    af_dict['hello_md5_ah_authentication'] = hello_md5_ah_authentication
                if neighbor_filter:
                    af_dict['neighbor_filter'] = neighbor_filter
                if jp_inbound_policy:
                    af_dict['jp_inbound_policy'] = jp_inbound_policy
                if jp_outbound_policy:
                    af_dict['jp_outbound_policy'] = jp_outbound_policy
                if jp_interval:
                    # Device reports minutes; schema stores seconds.
                    af_dict['jp_interval'] = int(jp_interval) * 60
                if jp_next_sending:
                    af_dict['jp_next_sending'] = int(jp_next_sending) * 60
                if bfd:
                    af_dict.setdefault('bfd', {})['enable'] = bfd.lower() == 'yes'
                if passive:
                    af_dict.setdefault('sm', {})['passive'] = passive.lower() == 'yes'
                if vpc_svi:
                    af_dict['vpc_svi'] = vpc_svi.lower() == 'yes'
                if auto_enabled:
                    af_dict['auto_enabled'] = auto_enabled.lower() == 'yes'

                if hellos or jps or asserts or grafts or graft_acks or \
                        df_backoffs or df_passes or df_winners or df_offers:
                    gen_dict = af_dict.setdefault('statistics', {})\
                                      .setdefault('general', {})
                    if hellos:
                        gen_dict['hellos'] = hellos
                    if jps:
                        gen_dict['jps'] = jps
                    if asserts:
                        gen_dict['asserts'] = asserts
                    if df_offers:
                        gen_dict['df_offers'] = df_offers
                    if graft_acks:
                        gen_dict['graft_acks'] = graft_acks
                    if grafts:
                        gen_dict['grafts'] = grafts
                    if df_backoffs:
                        gen_dict['df_backoffs'] = df_backoffs
                    if df_passes:
                        gen_dict['df_passes'] = df_passes
                    if df_winners:
                        gen_dict['df_winners'] = df_winners

                if checksum or invalid_packet_types or invalid_df_subtypes or \
                        authentication_failed or packet_length_errors or \
                        bad_version_packets or packets_from_self or \
                        packets_from_non_neighbors or \
                        packets_received_on_passiveinterface or \
                        jps_received_on_rpf_interface or \
                        joins_received_with_bidir_groups or \
                        joins_received_with_no_rp or \
                        joins_received_with_ssm_groups or \
                        joins_received_with_wrong_rp or \
                        jps_filtered_by_inbound_policy or \
                        jps_filtered_by_outbound_policy:
                    err_dict = af_dict.setdefault('statistics', {})\
                                      .setdefault('errors', {})
                    if checksum:
                        err_dict['checksum'] = int(checksum)
                    if invalid_df_subtypes:
                        err_dict['invalid_df_subtypes'] = int(invalid_df_subtypes)
                    if invalid_packet_types:
                        err_dict['invalid_packet_types'] = int(invalid_packet_types)
                    if authentication_failed:
                        err_dict['authentication_failed'] = int(authentication_failed)
                    if packet_length_errors:
                        err_dict['packet_length_errors'] = int(packet_length_errors)
                    if bad_version_packets:
                        err_dict['bad_version_packets'] = int(bad_version_packets)
                    if packets_from_self:
                        err_dict['packets_from_self'] = int(packets_from_self)
                    if packets_from_non_neighbors:
                        err_dict['packets_from_non_neighbors'] = int(packets_from_non_neighbors)
                    if packets_received_on_passiveinterface:
                        err_dict['packets_received_on_passiveinterface'] = \
                            int(packets_received_on_passiveinterface)
                    if jps_received_on_rpf_interface:
                        err_dict['jps_received_on_rpf_interface'] = int(jps_received_on_rpf_interface)
                    if joins_received_with_bidir_groups:
                        err_dict['joins_received_with_bidir_groups'] = int(joins_received_with_bidir_groups)
                    if joins_received_with_no_rp:
                        err_dict['joins_received_with_no_rp'] = int(joins_received_with_no_rp)
                    if joins_received_with_ssm_groups:
                        err_dict['joins_received_with_ssm_groups'] = int(joins_received_with_ssm_groups)
                    if joins_received_with_wrong_rp:
                        err_dict['joins_received_with_wrong_rp'] = int(joins_received_with_wrong_rp)
                    if jps_filtered_by_inbound_policy:
                        err_dict['jps_filtered_by_inbound_policy'] = int(jps_filtered_by_inbound_policy)
                    if jps_filtered_by_outbound_policy:
                        err_dict['jps_filtered_by_outbound_policy'] = int(jps_filtered_by_outbound_policy)

        return parsed_dict
# ==========================================================
# Schema for 'show ipv6 pim vrf all detail'
# ==========================================================
class ShowIpv6PimVrfAllDetailSchema(MetaParser):
    """Schema for show ipv6 pim vrf all detail"""
    schema = {
        'vrf':{
            # keyed by VRF name (e.g. 'default')
            Any():{
                'address_family':{
                    # keyed by address family; the parser uses 'ipv6'
                    Any():{
                        Optional('vrf_id'): int,
                        # hexadecimal table id string, e.g. '0x80000001'
                        Optional('table_id'): str,
                        Optional('interface_count'): int,
                        Optional('bfd'):{
                            Optional('enable'): bool,
                        },
                        # lowercased values; keys absent when device reports 'none'
                        Optional('state_limit'): str,
                        Optional('register_rate_limit'): str,
                        Optional('shared_tree_route_map'): str,
                        Optional('shared_tree_route_ranges'): str,
                        Optional('shared_tree_ranges'): str,
                    },
                },
            },
        },
    }
# ==========================================================
# parser for show ipv6 pim vrf all detail
#
# ==========================================================
class ShowIpv6PimVrfAllDetail(ShowIpv6PimVrfAllDetailSchema):
    """Parser for show ipv6 pim vrf all detail"""

    cli_command = 'show ipv6 pim vrf all detail'

    def cli(self, output=None):
        """Parse 'show ipv6 pim vrf all detail'.

        Args:
            output: pre-collected device output; when None the command
                is executed on the device

        Returns:
            dict conforming to ShowIpv6PimVrfAllDetailSchema
        """
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output

        af_name = 'ipv6'
        # Init dictionary
        parsed_dict = dict()

        # VRF Name              VRF      Table       Interface  BFD
        #                       ID       ID          Count      Enabled
        # default               1        0x80000001  3          no
        #
        # BUGFIX: the table id is hexadecimal; the original character class
        # [a_f0-9] (literal 'a', '_', 'f') silently failed to match ids
        # containing the hex digits b, c, d or e.  Corrected to [a-f0-9].
        p1 = re.compile(r'^\s*(?P<vrf>[\w\d]+) +(?P<vrf_id>\d+)'
                        r' +(?P<table_id>0x[a-f0-9]+) +(?P<interface_count>\d+)'
                        r' +(?P<bfd>\w+)$')
        # State Limit: None   (negative lookahead skips the 'None' case)
        p2 = re.compile(r'^\s*State +Limit: +(?P<state_limit>(?!None)\w+)$')
        # Register Rate Limit: none
        p3 = re.compile(r'^\s*Register +Rate +Limit: +(?P<register_rate_limit>(?!none)\w+)$')
        # Shared tree route-map: v6spt-threshold-group-list
        p4 = re.compile(r'^\s*Shared +tree +route-map: +(?P<route_map>[\w\d\S]+)$')
        # route-ranges: xxxxx
        # (was a second 'p4' in the original, shadowing the pattern above)
        p5 = re.compile(r'^\s*route-ranges:( +(?P<route_range>[\w\d\S]+))?$')
        # Shared tree ranges: none
        p6 = re.compile(r'^\s*Shared +tree +ranges: +(?P<shared_tree_ranges>(?!none)\w+)$')

        for line in out.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                group = m.groupdict()
                vrf_name = group['vrf']
                af_dict = parsed_dict.setdefault('vrf', {})\
                    .setdefault(vrf_name, {})\
                    .setdefault('address_family', {})\
                    .setdefault(af_name, {})
                af_dict['vrf_id'] = int(group['vrf_id'])
                af_dict['table_id'] = group['table_id']
                af_dict['interface_count'] = int(group['interface_count'])
                af_dict['bfd'] = {'enable': group['bfd'].lower() == 'yes'}
                continue

            # The remaining lines belong to the most recently seen VRF row.
            m = p2.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['state_limit'] = m.groupdict()['state_limit'].lower()
                continue

            m = p3.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['register_rate_limit'] = m.groupdict()['register_rate_limit'].lower()
                continue

            m = p4.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['shared_tree_route_map'] = m.groupdict()['route_map']
                continue

            m = p5.match(line)
            if m:
                if m.groupdict()['route_range']:
                    parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                        ['shared_tree_route_ranges'] = m.groupdict()['route_range']
                continue

            m = p6.match(line)
            if m:
                parsed_dict['vrf'][vrf_name]['address_family'][af_name]\
                    ['shared_tree_ranges'] = m.groupdict()['shared_tree_ranges']
                continue

        return parsed_dict
# ====================================
# Schema for 'show running-config pim'
# TODO: Add left attributes in PIM
# ====================================
class ShowRunningConfigPimSchema(MetaParser):
    """Schema for show running-config pim"""
    schema = {
        Optional('feature_pim'): bool,
        Optional('feature_pim6'): bool,
        Optional('vrf'): {
            # keyed by VRF name
            Any(): {
                Optional('address_family'): {
                    # keyed by address family ('ipv4' / 'ipv6')
                    Any(): {
                        Optional('rp'): {
                            # Auto-RP configuration
                            Optional('autorp'): {
                                Optional('send_rp_announce'): {
                                    Optional('interface'): str, #send_rp_announce_intf
                                    Optional('group'): str, #send_rp_announce_rp_group
                                    Optional('scope'): int, #send_rp_announce_scope
                                    Optional('group_list'): str, #send_rp_announce_group_list
                                    Optional('route_map'): str, #send_rp_announce_route_map
                                    Optional('prefix_list'): str, #send_rp_announce_prefix_list
                                    Optional('interval'): int, #send_rp_announce_interval
                                    Optional('bidir'): bool, #send_rp_announce_bidir
                                },
                                Optional('send_rp_discovery'): {
                                    'interface': str, #send_rp_discovery_intf
                                    Optional('scope'): int, #send_rp_discovery_scope
                                },
                                Optional('listener'): bool, #autorp_listener
                            },
                            # BSR RP-candidate configuration
                            Optional('bsr'): {
                                Any(): { # bsr_rp_candidate_interface
                                    Optional('interface'): str, # bsr_rp_candidate_interface
                                    Optional('policy'): str, # bsr_rp_candidate_group_list
                                    Optional('mode'): str, # bsr_rp_candidate_bidir
                                    Optional('priority'): int, # bsr_rp_candidate_priority
                                    Optional('interval'): int, # bsr_rp_candidate_interval
                                    Optional('route_map'): str, # bsr_rp_candidate_route_map
                                    Optional('prefix_list'): str, # bsr_rp_candidate_prefix_list
                                },
                            },
                            # Statically configured RPs, keyed by RP address
                            Optional('static_rp'): {
                                Any(): { # static_rp_addressl
                                    Optional('policy_name'): str, # static_rp_group_list
                                    Optional('override'): bool, # static_rp_override
                                    # NOTE(review): declared int, unlike the other
                                    # policy fields which are str — confirm intended
                                    Optional('policy'): int, # static_rp_policy
                                    Optional('bidir'): bool, # static_rp_bidir
                                    Optional('route_map'): str, # static_rp_route_map
                                    Optional('prefix_list'): str, # static_rp_prefix_list
                                },
                            }
                        }
                    }
                }
            }
        }
    }
# ====================================
# Parser for 'show running-config pim'
# TODO: Add left attributes in PIM
# ====================================
class ShowRunningConfigPim(ShowRunningConfigPimSchema):
    """Parser for 'show running-config pim' (ipv4) and 'show running-config pim6' (ipv6).

    Parses per-VRF RP configuration: static RPs, BSR RP-candidates and
    Auto-RP (send-rp-announce, send-rp-discovery, forward listen).
    """

    cli_command = [
        "show running-config {feature}",
        "show running-config {feature} | sec '^i'",
        "show running-config {feature} | sec {vrf}",
        "show running-config {feature} | sec '^i' | inc {pip_str}",
        "show running-config {feature} | sec {vrf} | inc {pip_str}",
        "show running-config {feature} | inc {pip_str}",
    ]

    def cli(self, address_family=None, pip_str=None, vrf=None):
        """Build the show command(s), execute them, and parse the output.

        Args:
            address_family: 'ipv4', 'ipv6' or None (None collects both).
            pip_str: optional filter string appended as "| inc {pip_str}".
            vrf: optional VRF name; 'default' selects the global config
                 via "| sec '^i'".

        Returns:
            dict matching ShowRunningConfigPimSchema.
        """
        assert address_family in ['ipv4', 'ipv6', None]

        # feature 'pim' carries the ipv4 config, 'pim6' the ipv6 config
        if address_family == 'ipv4':
            features = ['pim']
        elif address_family == 'ipv6':
            features = ['pim6']
        else:
            features = ['pim', 'pim6']

        out = ''
        for ft in features:
            # Pick the command variant matching the vrf/pip_str combination.
            if vrf and not pip_str:
                if vrf == 'default':
                    # commands starting with "ip pim", or interface lines
                    # without leading spaces
                    cmd = self.cli_command[1].format(feature=ft)
                else:
                    cmd = self.cli_command[2].format(vrf=vrf, feature=ft)
            elif vrf and pip_str:
                if vrf == 'default':
                    cmd = self.cli_command[3].format(pip_str=pip_str,
                                                     feature=ft)
                else:
                    cmd = self.cli_command[4].format(vrf=vrf, pip_str=pip_str,
                                                     feature=ft)
            elif pip_str:
                cmd = self.cli_command[5].format(pip_str=pip_str, feature=ft)
            else:
                cmd = self.cli_command[0].format(feature=ft)
            out += '\n' + self.device.execute(cmd)

        # Init vars
        pim_dict = {}

        # vrf context VRF1
        p_vrf = re.compile(r'^vrf +context +(?P<vrf>\S+)$')

        # feature pim
        p1 = re.compile(r'^feature +pim$')

        # feature pim6
        p1_1 = re.compile(r'^feature +pim6$')

        # ip pim rp-address 10.144.6.6 group-list 234.0.0.0/8
        # ip pim rp-address 10.144.6.6 group-list 239.1.1.0/24 bidir
        p3 = re.compile(r'^(?P<af>ip|ipv6) +pim +rp\-address +(?P<static_rp_address>[\w\.\:]+) +'
                        r'((group\-list +(?P<static_rp_group_list>[\w\.]+\/\d+))|'
                        r'(route\-map +(?P<static_rp_route_map>\w+))|'
                        r'(prefix\-list +(?P<static_rp_prefix_list>\w+)))'
                        r'( *(?P<dummy>.*))?$')

        # ip pim bsr rp-candidate loopback0 group-list 235.0.0.0/8 priority 128
        # ip pim rp-candidate Ethernet1/1 group-list 239.0.0.0/24 priority 10 interval 60 bidir
        p4 = re.compile(r'^(?P<af>ip|ipv6) +pim( *bsr)? +rp\-candidate +(?P<bsr_rp_candidate_interface>[\w\/\.\-]+) +'
                        r'((group\-list +(?P<bsr_rp_candidate_group_list>[\w\.]+\/\d+))|'
                        r'(route\-map +(?P<bsr_rp_candidate_route_map>\w+))|'
                        r'(prefix\-list +(?P<bsr_rp_candidate_prefix_list>\w+)))'
                        r'( *(?P<dummy>.*))?$')

        # ip pim send-rp-announce loopback0 group-list 236.0.0.0/8
        # ---- ipv6 not supported -----
        p5 = re.compile(r'^ip +pim +(send\-rp\-announce|(auto\-rp +rp\-candidate)) +'
                        r'((?P<send_rp_announce_intf>(lo|Lo|Eth|eth|Port|port)\w+)|'
                        r'(?P<send_rp_announce_rp_group>(\d+\.){3}\d+)) +'
                        r'((group\-list +(?P<send_rp_announce_group_list>[\w\.]+\/\d+))|'
                        r'(route\-map +(?P<send_rp_announce_route_map>\w+))|'
                        r'(prefix\-list +(?P<send_rp_announce_prefix_list>\w+)))'
                        r'(?P<dummy>.*)?$')

        # ip pim send-rp-discovery loopback0
        # ip pim send-rp-discovery loopback0 scope 34
        # ---- ipv6 not supported -----
        p6 = re.compile(r'^ip +pim +send\-rp\-discovery +(?P<send_rp_discovery_intf>[\w\/\.\-]+)'
                        r'( scope +(?P<send_rp_discovery_scope>\d+))?$')

        # ip pim auto-rp forward listen
        # ---- ipv6 not supported -----
        p11 = re.compile(r'^ip +pim +auto\-rp +forward +listen$')

        # TODO: not parsed yet: 'ip pim bsr bsr-candidate ...',
        # 'ip pim ssm range ...', 'ip pim anycast-rp ...',
        # 'ip pim bsr forward listen', 'ip pim register-source ...'

        for line in out.splitlines():
            # A non-indented line belongs to the global (default) vrf; the
            # 'vrf context <name>' match below re-targets vrf_dict for the
            # indented lines that follow it.
            if line and not line.startswith(' '):
                vrf_dict = pim_dict.setdefault('vrf', {}).setdefault('default', {})
            elif vrf:
                vrf_dict = pim_dict.setdefault('vrf', {}).setdefault(vrf, {})
            line = line.strip()

            # vrf context VRF1
            m = p_vrf.match(line)
            if m:
                vrf_dict = pim_dict.setdefault('vrf', {})\
                    .setdefault(m.groupdict()['vrf'], {})
                continue

            # feature pim
            m = p1.match(line)
            if m:
                pim_dict['feature_pim'] = True
                continue

            # feature pim6
            m = p1_1.match(line)
            if m:
                pim_dict['feature_pim6'] = True
                continue

            # ip pim rp-address 10.144.6.6 group-list 234.0.0.0/8
            # ip pim rp-address 10.144.6.6 group-list 239.1.1.0/24 bidir
            m = p3.match(line)
            if m:
                groups = m.groupdict()
                af = 'ipv4' if groups['af'] == 'ip' else 'ipv6'
                rp_dict = vrf_dict.setdefault('address_family', {}).setdefault(af, {})\
                    .setdefault('rp', {}).setdefault('static_rp', {})\
                    .setdefault(groups['static_rp_address'], {})
                if groups['static_rp_group_list']:
                    rp_dict.setdefault('policy_name', groups['static_rp_group_list'])
                if groups['static_rp_route_map']:
                    rp_dict.setdefault('route_map', groups['static_rp_route_map'])
                if groups['static_rp_prefix_list']:
                    rp_dict.setdefault('prefix_list', groups['static_rp_prefix_list'])
                # trailing keywords (order varies on device) land in 'dummy';
                # it is None when the optional tail group did not match
                dummy = groups['dummy'] or ''
                if 'bidir' in dummy:
                    rp_dict['bidir'] = True
                if 'override' in dummy:
                    rp_dict['override'] = True
                continue

            # ip pim bsr rp-candidate loopback0 group-list 235.0.0.0/8 priority 128
            # ip pim rp-candidate loopback10 route-map filtera bidir
            # ip pim rp-candidate loopback10 prefix-list pfxlista priority 10
            # ip pim rp-candidate Ethernet1/1 group-list 239.0.0.0/24 priority 10 interval 60 bidir
            m = p4.match(line)
            if m:
                groups = m.groupdict()
                af = 'ipv4' if groups['af'] == 'ip' else 'ipv6'
                rp_dict = vrf_dict.setdefault('address_family', {}).setdefault(af, {})\
                    .setdefault('rp', {}).setdefault('bsr', {})\
                    .setdefault(groups['bsr_rp_candidate_interface'], {})
                rp_dict['interface'] = groups['bsr_rp_candidate_interface']
                if groups['bsr_rp_candidate_group_list']:
                    rp_dict.setdefault('policy', groups['bsr_rp_candidate_group_list'])
                if groups['bsr_rp_candidate_route_map']:
                    rp_dict.setdefault('route_map', groups['bsr_rp_candidate_route_map'])
                if groups['bsr_rp_candidate_prefix_list']:
                    rp_dict.setdefault('prefix_list', groups['bsr_rp_candidate_prefix_list'])
                # 'dummy' may be None (the whole tail group is optional);
                # guard before re.search to avoid a TypeError
                dummy = groups['dummy'] or ''
                interval = re.search(r'interval +(\d+)', dummy)
                priority = re.search(r'priority +(\d+)', dummy)
                if interval:
                    rp_dict['interval'] = int(interval.group(1))
                if priority:
                    rp_dict['priority'] = int(priority.group(1))
                if 'bidir' in dummy:
                    rp_dict['mode'] = 'bidir'
                continue

            # ip pim send-rp-announce loopback0 group-list 236.0.0.0/8 bidir interval 60 scope 43
            # ip pim send-rp-announce 10.16.2.2 prefix-list abc bidir
            m = p5.match(line)
            if m:
                groups = m.groupdict()
                rp_dict = vrf_dict.setdefault('address_family', {}).setdefault('ipv4', {})\
                    .setdefault('rp', {}).setdefault('autorp', {})\
                    .setdefault('send_rp_announce', {})
                if groups['send_rp_announce_intf']:
                    rp_dict.setdefault('interface', groups['send_rp_announce_intf'])
                if groups['send_rp_announce_rp_group']:
                    rp_dict.setdefault('group', groups['send_rp_announce_rp_group'])
                if groups['send_rp_announce_group_list']:
                    rp_dict.setdefault('group_list', groups['send_rp_announce_group_list'])
                if groups['send_rp_announce_route_map']:
                    rp_dict.setdefault('route_map', groups['send_rp_announce_route_map'])
                if groups['send_rp_announce_prefix_list']:
                    rp_dict.setdefault('prefix_list', groups['send_rp_announce_prefix_list'])
                dummy = groups['dummy'] or ''
                interval = re.search(r'interval +(\d+)', dummy)
                scope = re.search(r'scope +(\d+)', dummy)
                if interval:
                    rp_dict['interval'] = int(interval.group(1))
                if scope:
                    rp_dict['scope'] = int(scope.group(1))
                if 'bidir' in dummy:
                    rp_dict['bidir'] = True
                continue

            # ip pim send-rp-discovery loopback0
            # ip pim send-rp-discovery loopback0 scope 34
            m = p6.match(line)
            if m:
                groups = m.groupdict()
                rp_dict = vrf_dict.setdefault('address_family', {}).setdefault('ipv4', {})\
                    .setdefault('rp', {}).setdefault('autorp', {})\
                    .setdefault('send_rp_discovery', {})
                rp_dict['interface'] = groups['send_rp_discovery_intf']
                if groups['send_rp_discovery_scope']:
                    rp_dict.setdefault('scope', int(groups['send_rp_discovery_scope']))
                continue

            # ip pim auto-rp forward listen
            m = p11.match(line)
            if m:
                rp_dict = vrf_dict.setdefault('address_family', {})\
                    .setdefault('ipv4', {}).setdefault('rp', {})\
                    .setdefault('autorp', {})
                # schema declares this key as 'listener'
                # (see Optional('listener') in ShowRunningConfigPimSchema)
                rp_dict['listener'] = True
                continue

        return pim_dict
| 50.052786
| 137
| 0.476465
| 22,343
| 221,884
| 4.463948
| 0.024974
| 0.035072
| 0.060759
| 0.076962
| 0.867733
| 0.838958
| 0.799595
| 0.773366
| 0.757956
| 0.738675
| 0
| 0.012974
| 0.372974
| 221,884
| 4,432
| 138
| 50.064079
| 0.703911
| 0.083458
| 0
| 0.752829
| 0
| 0.003574
| 0.214602
| 0.057587
| 0
| 0
| 0
| 0.000226
| 0.005063
| 1
| 0.005063
| false
| 0.01489
| 0.001191
| 0.000596
| 0.02978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a6e047d9b61597539022f1fb3400fbfbfc1d546
| 836,031
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_l2vpn_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_l2vpn_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_l2vpn_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_l2vpn_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR l2vpn package configuration.
This module contains definitions
for the following management objects\:
l2vpn\: L2VPN configuration
generic\-interface\-lists\: generic interface lists
evpn\: evpn
This YANG module augments the
Cisco\-IOS\-XR\-ifmgr\-cfg,
Cisco\-IOS\-XR\-snmp\-agent\-cfg
modules with configuration data.
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class BackupDisableEnum(Enum):
    """
    BackupDisableEnum

    Backup disable

    .. data:: never = 0

        Never

    .. data:: delay = 1

        Delay seconds

    """

    never = 0
    delay = 1

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['BackupDisableEnum']
class BgpRouteDistinguisherEnum(Enum):
    """
    BgpRouteDistinguisherEnum

    Bgp route distinguisher

    .. data:: auto = 1

        RD automatically assigned

    .. data:: two_byte_as = 2

        RD in 2 byte AS:nn format

    .. data:: four_byte_as = 3

        RD in 4 byte AS:nn format

    .. data:: ipv4_address = 4

        RD in IpV4address

    """

    auto = 1
    two_byte_as = 2
    four_byte_as = 3
    ipv4_address = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['BgpRouteDistinguisherEnum']
class BgpRouteTargetEnum(Enum):
    """
    BgpRouteTargetEnum

    Bgp route target

    .. data:: no_stitching = 0

        RT is default type

    .. data:: stitching = 1

        RT is for stitching (Golf-L2)

    """

    no_stitching = 0
    stitching = 1

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['BgpRouteTargetEnum']
class BgpRouteTargetFormatEnum(Enum):
    """
    BgpRouteTargetFormatEnum

    Bgp route target format

    .. data:: none = 0

        No route target

    .. data:: two_byte_as = 1

        2 Byte AS:nn format

    .. data:: four_byte_as = 2

        4 byte AS:nn format

    .. data:: ipv4_address = 3

        IP:nn format

    """

    none = 0
    two_byte_as = 1
    four_byte_as = 2
    ipv4_address = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['BgpRouteTargetFormatEnum']
class BgpRouteTargetRoleEnum(Enum):
    """
    BgpRouteTargetRoleEnum

    Bgp route target role

    .. data:: both = 0

        Both Import and export roles

    .. data:: import_ = 1

        Import role

    .. data:: export = 2

        Export role

    """

    both = 0
    import_ = 1  # trailing underscore avoids the Python 'import' keyword
    export = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['BgpRouteTargetRoleEnum']
class BridgeDomainTransportModeEnum(Enum):
    """
    BridgeDomainTransportModeEnum

    Bridge domain transport mode

    .. data:: vlan_passthrough = 3

        Vlan tagged passthrough mode

    """

    vlan_passthrough = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['BridgeDomainTransportModeEnum']
class ControlWordEnum(Enum):
    """
    ControlWordEnum

    Control word

    .. data:: enable = 1

        Enable control word

    .. data:: disable = 2

        Disable control word

    """

    enable = 1
    disable = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['ControlWordEnum']
class ErpPort1Enum(Enum):
    """
    ErpPort1Enum

    Erp port1

    .. data:: port0 = 0

        ERP main port 0

    .. data:: port1 = 1

        ERP main port 1

    """

    port0 = 0
    port1 = 1

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['ErpPort1Enum']
class ErpPortEnum(Enum):
    """
    ErpPortEnum

    Erp port

    .. data:: none = 1

        ERP port type none

    .. data:: virtual = 2

        ERP port type virtual

    .. data:: interface = 3

        ERP port type interface

    """

    none = 1
    virtual = 2
    interface = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['ErpPortEnum']
class ErpapsEnum(Enum):
    """
    ErpapsEnum

    Erpaps

    .. data:: interface = 1

        ERP APS type interface

    .. data:: bridge_domain = 2

        ERP APS type bridge domain

    .. data:: xconnect = 3

        ERP APS type xconnect

    .. data:: none = 4

        ERP APS type none

    """

    interface = 1
    bridge_domain = 2
    xconnect = 3
    none = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['ErpapsEnum']
class FlowLabelLoadBalanceEnum(Enum):
    """
    FlowLabelLoadBalanceEnum

    Flow label load balance

    .. data:: off = 0

        Flow Label load balance is off

    .. data:: receive = 1

        Delete Flow Label on receive side

    .. data:: transmit = 2

        Insert Flow Label on transmit side

    .. data:: both = 3

        Insert/Delete Flow Label on transmit/receive side

    """

    off = 0
    receive = 1
    transmit = 2
    both = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['FlowLabelLoadBalanceEnum']
class FlowLabelTlvCodeEnum(Enum):
    """
    FlowLabelTlvCodeEnum

    Flow label tlv code

    .. data:: Y_17 = 4

        Set Flow Label Legacy TLV code (DEPRECATED)

    .. data:: disable = 8

        Disable Sending Flow Label Legacy TLV

    """

    Y_17 = 4
    disable = 8

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['FlowLabelTlvCodeEnum']
class InterfaceProfileEnum(Enum):
    """
    InterfaceProfileEnum

    Interface profile

    .. data:: snoop = 1

        Set the snooping

    .. data:: dhcp_protocol = 2

        disable DHCP protocol

    """

    snoop = 1
    dhcp_protocol = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['InterfaceProfileEnum']
class InterfaceTrafficFloodEnum(Enum):
    """
    InterfaceTrafficFloodEnum

    Interface traffic flood

    .. data:: traffic_flooding = 0

        Traffic flooding

    .. data:: enable_flooding = 1

        Enable Flooding

    .. data:: disable_flooding = 2

        Disable flooding

    """

    traffic_flooding = 0
    enable_flooding = 1
    disable_flooding = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['InterfaceTrafficFloodEnum']
class InterworkingEnum(Enum):
    """
    InterworkingEnum

    Interworking

    .. data:: ethernet = 1

        Ethernet interworking

    .. data:: ipv4 = 3

        IPv4 interworking

    """

    ethernet = 1
    ipv4 = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['InterworkingEnum']
class L2EncapsulationEnum(Enum):
    """
    L2EncapsulationEnum

    L2 encapsulation

    .. data:: vlan = 4

        Vlan tagged mode

    .. data:: ethernet = 5

        Ethernet port mode

    """

    vlan = 4
    ethernet = 5

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2EncapsulationEnum']
class L2TpCookieSizeEnum(Enum):
    """
    L2TpCookieSizeEnum

    L2tp cookie size

    .. data:: zero = 0

        Cookie size is zero bytes

    .. data:: four = 4

        Cookie size is four bytes

    .. data:: eight = 8

        Cookie size is eight bytes

    """

    zero = 0
    four = 4
    eight = 8

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2TpCookieSizeEnum']
class L2TpSignalingProtocolEnum(Enum):
    """
    L2TpSignalingProtocolEnum

    L2tp signaling protocol

    .. data:: none = 1

        No signaling

    .. data:: l2tpv3 = 2

        L2TPv3

    """

    none = 1
    l2tpv3 = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2TpSignalingProtocolEnum']
class L2Tpv3SequencingEnum(Enum):
    """
    L2Tpv3SequencingEnum

    L2tpv3 sequencing

    .. data:: off = 0

        Sequencing is off

    .. data:: both = 4

        Sequencing on both transmit and receive side

    """

    off = 0
    both = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Tpv3SequencingEnum']
class L2VpnCapabilityModeEnum(Enum):
    """
    L2VpnCapabilityModeEnum

    L2vpn capability mode

    .. data:: high_mode = 1

        Compute global capability as the highest node capability

    .. data:: single_mode = 2

        Disable global capability re-computation

    """

    high_mode = 1
    single_mode = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2VpnCapabilityModeEnum']
class L2VpnLoggingEnum(Enum):
    """
    L2VpnLoggingEnum

    L2vpn logging

    .. data:: enable = 1

        enable logging

    .. data:: disable = 2

        disable logging

    """

    enable = 1
    disable = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2VpnLoggingEnum']
class L2VpnVerificationEnum(Enum):
    """
    L2VpnVerificationEnum

    L2vpn verification

    .. data:: enable = 1

        enable verification

    .. data:: disable = 2

        disable verification

    """

    enable = 1
    disable = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2VpnVerificationEnum']
class LdpVplsIdEnum(Enum):
    """
    LdpVplsIdEnum

    Ldp vpls id

    .. data:: two_byte_as = 10

        VPLS-ID in 2 byte AS:nn format

    .. data:: ipv4_address = 266

        VPLS-ID in IPv4 IP:nn format

    """

    two_byte_as = 10
    ipv4_address = 266

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['LdpVplsIdEnum']
class LoadBalanceEnum(Enum):
    """
    LoadBalanceEnum

    Load balance

    .. data:: source_dest_mac = 1

        Source and Destination MAC hashing

    .. data:: source_dest_ip = 2

        Source and Destination IP hashing

    .. data:: pseudowire_label = 4

        PW Label hashing

    """

    source_dest_mac = 1
    source_dest_ip = 2
    pseudowire_label = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['LoadBalanceEnum']
class MacAgingEnum(Enum):
    """
    MacAgingEnum

    Mac aging

    .. data:: absolute = 1

        Absolute aging type

    .. data:: inactivity = 2

        Inactivity aging type

    """

    absolute = 1
    inactivity = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MacAgingEnum']
class MacLearnEnum(Enum):
    """
    MacLearnEnum

    Mac learn

    .. data:: default_learning = 0

        Mac Learning

    .. data:: enable_learning = 1

        Enable Learning

    .. data:: disable_learning = 2

        Disable Learning

    """

    default_learning = 0
    enable_learning = 1
    disable_learning = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MacLearnEnum']
class MacLimitActionEnum(Enum):
    """
    MacLimitActionEnum

    Mac limit action

    .. data:: none = 0

        No action

    .. data:: flood = 1

        Flood Mac Limit Action

    .. data:: no_flood = 2

        NoFlood Mac Limit Action

    .. data:: shutdown = 3

        Shutdown Mac Limit Action

    """

    none = 0
    flood = 1
    no_flood = 2
    shutdown = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MacLimitActionEnum']
class MacNotificationEnum(Enum):
    """
    MacNotificationEnum

    Mac notification

    .. data:: no_notif = 0

        No_Notification Trap

    .. data:: syslog = 1

        syslog message

    .. data:: trap = 2

        Snmp Trap

    .. data:: syslog_snmp = 3

        Syslog_snmp Trap

    """

    no_notif = 0
    syslog = 1
    trap = 2
    syslog_snmp = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MacNotificationEnum']
class MacSecureActionEnum(Enum):
    """
    MacSecureActionEnum

    Mac secure action

    .. data:: restrict = 1

        MAC Secure Action Restrict

    .. data:: none = 2

        No Action

    .. data:: shutdown = 3

        MAC Secure Action Shutdown

    """

    restrict = 1
    none = 2
    shutdown = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MacSecureActionEnum']
class MacWithdrawBehaviorEnum(Enum):
    """
    MacWithdrawBehaviorEnum

    Mac withdraw behavior

    .. data:: legacy = 1

        MAC Withdrawal sent on state-down (legacy)

    .. data:: optimized = 2

        Optimized MAC Withdrawal

    """

    legacy = 1
    optimized = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MacWithdrawBehaviorEnum']
class MplsSequencingEnum(Enum):
    """
    MplsSequencingEnum

    Mpls sequencing

    .. data:: off = 0

        Sequencing is off

    .. data:: transmit = 1

        Sequencing on transmit side

    .. data:: receive = 2

        Sequencing on receive side

    .. data:: both = 4

        Sequencing on both transmit and receive side

    """

    off = 0
    transmit = 1
    receive = 2
    both = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MplsSequencingEnum']
class MplsSignalingProtocolEnum(Enum):
    """
    MplsSignalingProtocolEnum

    Mpls signaling protocol

    .. data:: none = 1

        No signaling

    .. data:: ldp = 4

        LDP

    """

    none = 1
    ldp = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['MplsSignalingProtocolEnum']
class PortDownFlushEnum(Enum):
    """
    PortDownFlushEnum

    Port down flush

    .. data:: port_down_flush = 0

        MAC Port Down Flush

    .. data:: enable_port_down_flush = 1

        Enable Port Down Flush

    .. data:: disable_port_down_flush = 2

        Disable Port Down Flush

    """

    port_down_flush = 0
    enable_port_down_flush = 1
    disable_port_down_flush = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['PortDownFlushEnum']
class PreferredPathEnum(Enum):
    """
    PreferredPathEnum

    Preferred path

    .. data:: te_tunnel = 2

        TE Tunnel

    .. data:: ip_tunnel = 3

        IP Tunnel

    .. data:: tp_tunnel = 4

        TP Tunnel

    """

    te_tunnel = 2
    ip_tunnel = 3
    tp_tunnel = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['PreferredPathEnum']
class PwSwitchingPointTlvEnum(Enum):
    """
    PwSwitchingPointTlvEnum

    Pw switching point tlv

    .. data:: hide = 2

        Hide TLV

    """

    hide = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['PwSwitchingPointTlvEnum']
class RplRoleEnum(Enum):
    """
    RplRoleEnum

    Rpl role

    .. data:: owner = 1

        ERP RPL owner

    .. data:: neighbor = 2

        ERP RPL neighbor

    .. data:: next_neighbor = 3

        ERP RPL next neighbor

    """

    owner = 1
    neighbor = 2
    next_neighbor = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['RplRoleEnum']
class StormControlEnum(Enum):
    """
    StormControlEnum

    Storm control

    .. data:: unicast = 1

        Unknown-unicast Storm Control

    .. data:: multicast = 2

        Multicast Storm Control

    .. data:: broadcast = 4

        Broadcast Storm Control

    """

    unicast = 1
    multicast = 2
    broadcast = 4

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['StormControlEnum']
class TransportModeEnum(Enum):
    """
    TransportModeEnum

    Transport mode

    .. data:: ethernet = 1

        Ethernet port mode

    .. data:: vlan = 2

        Vlan tagged mode

    .. data:: vlan_passthrough = 3

        Vlan tagged passthrough mode

    """

    ethernet = 1
    vlan = 2
    vlan_passthrough = 3

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['TransportModeEnum']
class TypeOfServiceModeEnum(Enum):
    """
    TypeOfServiceModeEnum

    Type of service mode

    .. data:: none = 0

        Do not reflect the type of service

    .. data:: reflect = 1

        Reflect the type of service

    """

    none = 0
    reflect = 1

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['TypeOfServiceModeEnum']
class VccvVerificationEnum(Enum):
    """
    VccvVerificationEnum

    Vccv verification

    .. data:: none = 0

        No connectivity verification over VCCV

    .. data:: lsp_ping = 2

        LSP Ping over VCCV

    """

    none = 0
    lsp_ping = 2

    @staticmethod
    def _meta_info():
        # Lazy import: the generated meta table is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['VccvVerificationEnum']
class L2Vpn(object):
"""
L2VPN configuration
.. attribute:: auto_discovery
Global auto\-discovery attributes
**type**\: :py:class:`AutoDiscovery <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.AutoDiscovery>`
.. attribute:: capability
L2VPN Capability Mode
**type**\: :py:class:`L2VpnCapabilityModeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2VpnCapabilityModeEnum>`
.. attribute:: database
L2VPN databases
**type**\: :py:class:`Database <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database>`
.. attribute:: enable
Enable L2VPN feature
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: l2vpn_router_id
Global L2VPN Router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: load_balance
Enable flow load balancing on l2vpn bridges
**type**\: :py:class:`LoadBalanceEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.LoadBalanceEnum>`
.. attribute:: mspw_description
MS\-PW global description
**type**\: str
**length:** 0..64
.. attribute:: mtu_mismatch_ignore
Ignore MTU Mismatch for XCs
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: neighbor
L2VPN neighbor submode
**type**\: :py:class:`Neighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Neighbor>`
.. attribute:: nsr
Enable Non\-Stop Routing
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: pbb
L2VPN PBB Global
**type**\: :py:class:`Pbb <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Pbb>`
.. attribute:: pw_grouping
Enable PW grouping
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: pw_routing
Pseudowire\-routing attributes
**type**\: :py:class:`PwRouting <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.PwRouting>`
.. attribute:: pw_status_disable
Disable PW status
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: pwoam_refresh
Configure PW OAM refresh interval
**type**\: int
**range:** 1..4095
**units**\: second
.. attribute:: snmp
SNMP related configuration
**type**\: :py:class:`Snmp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Snmp>`
.. attribute:: tcn_propagation
Topology change notification propagation
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: utility
L2VPN utilities
**type**\: :py:class:`Utility <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Utility>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
    def __init__(self):
        """Create the child configuration containers (each back-linked to
        this object via ``parent``) and initialize every leaf to None
        (meaning: not configured)."""
        self.auto_discovery = L2Vpn.AutoDiscovery()
        self.auto_discovery.parent = self
        self.capability = None
        self.database = L2Vpn.Database()
        self.database.parent = self
        self.enable = None
        self.l2vpn_router_id = None
        self.load_balance = None
        self.mspw_description = None
        self.mtu_mismatch_ignore = None
        self.neighbor = L2Vpn.Neighbor()
        self.neighbor.parent = self
        self.nsr = None
        self.pbb = L2Vpn.Pbb()
        self.pbb.parent = self
        self.pw_grouping = None
        self.pw_routing = L2Vpn.PwRouting()
        self.pw_routing.parent = self
        self.pw_status_disable = None
        self.pwoam_refresh = None
        self.snmp = L2Vpn.Snmp()
        self.snmp.parent = self
        self.tcn_propagation = None
        self.utility = L2Vpn.Utility()
        self.utility.parent = self
    class PwRouting(object):
        """
        Pseudowire\-routing attributes

        .. attribute:: pw_routing_bgp

            Enable Autodiscovery BGP Pseudowire\-routing BGP
            **type**\: :py:class:`PwRoutingBgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.PwRouting.PwRoutingBgp>`

        .. attribute:: pw_routing_global_id

            Pseudowire\-routing Global ID
            **type**\: int
            **range:** 1..4294967295

        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # Child container is created eagerly and back-linked to its parent.
            self.pw_routing_bgp = L2Vpn.PwRouting.PwRoutingBgp()
            self.pw_routing_bgp.parent = self
            self.pw_routing_global_id = None

        class PwRoutingBgp(object):
            """
            Enable Autodiscovery BGP Pseudowire\-routing BGP

            .. attribute:: enable

                Enable Autodiscovery BGP
                **type**\: :py:class:`Empty<ydk.types.Empty>`

            .. attribute:: evpn_route_distinguisher

                Route Distinguisher
                **type**\: :py:class:`EvpnRouteDistinguisher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher>`

            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.enable = None
                self.evpn_route_distinguisher = L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher()
                self.evpn_route_distinguisher.parent = self

            class EvpnRouteDistinguisher(object):
                """
                Route Distinguisher

                .. attribute:: addr_index

                    Addr index
                    **type**\: int
                    **range:** 0..65535

                .. attribute:: address

                    IPV4 address
                    **type**\: str
                    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                .. attribute:: as_

                    Two byte or 4 byte AS number
                    **type**\: int
                    **range:** 1..4294967295

                .. attribute:: as_index

                    AS\:nn (hex or decimal format)
                    **type**\: int
                    **range:** 0..4294967295

                .. attribute:: type

                    Router Distinguisher Type
                    **type**\: :py:class:`BgpRouteDistinguisherEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteDistinguisherEnum>`

                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.addr_index = None
                    self.address = None
                    self.as_ = None
                    self.as_index = None
                    self.type = None

                @property
                def _common_path(self):
                    # Absolute XPath of this node in the Cisco-IOS-XR-l2vpn-cfg model.
                    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:pw-routing/Cisco-IOS-XR-l2vpn-cfg:pw-routing-bgp/Cisco-IOS-XR-l2vpn-cfg:evpn-route-distinguisher'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    # A node has data when any of its leaves is set.
                    if not self.is_config():
                        return False
                    if self.addr_index is not None:
                        return True
                    if self.address is not None:
                        return True
                    if self.as_ is not None:
                        return True
                    if self.as_index is not None:
                        return True
                    if self.type is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    # Lazy import: the generated meta table is only loaded on demand.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher']['meta_info']

            @property
            def _common_path(self):
                # Absolute XPath of this node in the Cisco-IOS-XR-l2vpn-cfg model.
                return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:pw-routing/Cisco-IOS-XR-l2vpn-cfg:pw-routing-bgp'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # A child container only counts when it reports data itself.
                if not self.is_config():
                    return False
                if self.enable is not None:
                    return True
                if self.evpn_route_distinguisher is not None and self.evpn_route_distinguisher._has_data():
                    return True
                return False

            @staticmethod
            def _meta_info():
                # Lazy import: the generated meta table is only loaded on demand.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.PwRouting.PwRoutingBgp']['meta_info']

        @property
        def _common_path(self):
            # Absolute XPath of this node in the Cisco-IOS-XR-l2vpn-cfg model.
            return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:pw-routing'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # A child container only counts when it reports data itself.
            if not self.is_config():
                return False
            if self.pw_routing_bgp is not None and self.pw_routing_bgp._has_data():
                return True
            if self.pw_routing_global_id is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Lazy import: the generated meta table is only loaded on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.PwRouting']['meta_info']
# L2Vpn.Neighbor: generated YDK container for the l2vpn neighbor submode.
class Neighbor(object):
"""
L2VPN neighbor submode
.. attribute:: ldp_flap
Enable targetted LDP session flap action
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# Presence leaf; None means unset.
self.ldp_flap = None
@property
def _common_path(self):
# Absolute XPath; this container hangs directly off the l2vpn root.
return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:neighbor'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# True when the single leaf has been set.
if not self.is_config():
return False
if self.ldp_flap is not None:
return True
return False
@staticmethod
def _meta_info():
# Deferred import keeps module import cheap.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Neighbor']['meta_info']
# L2Vpn.Database: generated YDK container grouping all L2VPN databases.
# (Class body continues beyond this region with the nested child classes.)
class Database(object):
"""
L2VPN databases
.. attribute:: bridge_domain_groups
List of bridge groups
**type**\: :py:class:`BridgeDomainGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups>`
.. attribute:: flexible_xconnect_service_table
List of Flexible XConnect Services
**type**\: :py:class:`FlexibleXconnectServiceTable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable>`
.. attribute:: g8032_rings
List of G8032 Ring
**type**\: :py:class:`G8032Rings <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings>`
.. attribute:: pseudowire_classes
List of pseudowire classes
**type**\: :py:class:`PseudowireClasses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses>`
.. attribute:: redundancy
Redundancy groups
**type**\: :py:class:`Redundancy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.Redundancy>`
.. attribute:: xconnect_groups
List of xconnect groups
**type**\: :py:class:`XconnectGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# Child containers are instantiated eagerly and back-linked via .parent
# so that each child can derive its XPath from its parent's _common_path.
self.bridge_domain_groups = L2Vpn.Database.BridgeDomainGroups()
self.bridge_domain_groups.parent = self
self.flexible_xconnect_service_table = L2Vpn.Database.FlexibleXconnectServiceTable()
self.flexible_xconnect_service_table.parent = self
self.g8032_rings = L2Vpn.Database.G8032Rings()
self.g8032_rings.parent = self
self.pseudowire_classes = L2Vpn.Database.PseudowireClasses()
self.pseudowire_classes.parent = self
self.redundancy = L2Vpn.Database.Redundancy()
self.redundancy.parent = self
self.xconnect_groups = L2Vpn.Database.XconnectGroups()
self.xconnect_groups.parent = self
# L2Vpn.Database.G8032Rings: YANG list wrapper for G.8032 Ethernet rings.
class G8032Rings(object):
"""
List of G8032 Ring
.. attribute:: g8032_ring
G8032 Ring
**type**\: list of :py:class:`G8032Ring <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# YList of G8032Ring entries; .parent/.name wire the list into the tree.
self.g8032_ring = YList()
self.g8032_ring.parent = self
self.g8032_ring.name = 'g8032_ring'
# One G.8032 ring, keyed by g8032_ring_name.
class G8032Ring(object):
"""
G8032 Ring
.. attribute:: g8032_ring_name <key>
Name of the G8032 ring
**type**\: str
**length:** 0..32
.. attribute:: erp_instances
List of ethernet ring protection instance
**type**\: :py:class:`ErpInstances <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances>`
.. attribute:: erp_port0s
Ethernet ring protection port0
**type**\: :py:class:`ErpPort0S <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S>`
.. attribute:: erp_port1s
Ethernet ring protection port1
**type**\: :py:class:`ErpPort1S <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S>`
.. attribute:: erp_provider_bridge
Ethernet ring protection provider bridge
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: exclusion_list
Vlan IDs in the format of a\-b,c,d,e\-f,g ,untagged
**type**\: str
.. attribute:: open_ring
Specify the G.8032 instance as open ring
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# List key; must be set before _common_path can be derived.
self.g8032_ring_name = None
self.erp_instances = L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances()
self.erp_instances.parent = self
self.erp_port0s = L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S()
self.erp_port0s.parent = self
self.erp_port1s = L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S()
self.erp_port1s.parent = self
self.erp_provider_bridge = None
self.exclusion_list = None
self.open_ring = None
# Container for the ring's port0 main-port entries.
class ErpPort0S(object):
"""
Ethernet ring protection port0
.. attribute:: erp_port0
Configure ERP main port0
**type**\: list of :py:class:`ErpPort0 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.erp_port0 = YList()
self.erp_port0.parent = self
self.erp_port0.name = 'erp_port0'
# One port0 entry, keyed by interface_name.
class ErpPort0(object):
"""
Configure ERP main port0
.. attribute:: interface_name <key>
Port0 interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: monitor
Ethernet ring protection port0 monitor
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.monitor = None
@property
def _common_path(self):
# Keyed list entry: path requires both a parent and the key leaf.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:erp-port0[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.monitor is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0']['meta_info']
# --- trailing members of ErpPort0S ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:erp-port0s'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# True when any list entry has data.
if not self.is_config():
return False
if self.erp_port0 is not None:
for child_ref in self.erp_port0:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S']['meta_info']
# Container for the ring's ethernet-ring-protection instances.
class ErpInstances(object):
"""
List of ethernet ring protection instance
.. attribute:: erp_instance
Ethernet ring protection instance
**type**\: list of :py:class:`ErpInstance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.erp_instance = YList()
self.erp_instance.parent = self
self.erp_instance.name = 'erp_instance'
# One ERP instance, keyed by erp_instance_id (1..2 per the model).
class ErpInstance(object):
"""
Ethernet ring protection instance
.. attribute:: erp_instance_id <key>
ERP instance number
**type**\: int
**range:** 1..2
.. attribute:: aps
Automatic protection switching
**type**\: :py:class:`Aps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps>`
.. attribute:: description
Ethernet ring protection instance description
**type**\: str
**length:** 0..32
.. attribute:: inclusion_list
Associates a set of VLAN IDs with the G .8032 instance
**type**\: str
.. attribute:: profile
Ethernet ring protection instance profile
**type**\: str
**length:** 0..32
.. attribute:: rpl
Ring protection link
**type**\: :py:class:`Rpl <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# List key.
self.erp_instance_id = None
self.aps = L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps()
self.aps.parent = self
self.description = None
self.inclusion_list = None
self.profile = None
self.rpl = L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl()
self.rpl.parent = self
# Ring-protection-link settings for this instance.
class Rpl(object):
"""
Ring protection link
.. attribute:: port
ERP main port number
**type**\: :py:class:`ErpPort1Enum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.ErpPort1Enum>`
.. attribute:: role
RPL role
**type**\: :py:class:`RplRoleEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.RplRoleEnum>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.port = None
self.role = None
@property
def _common_path(self):
# Non-keyed child: path is parent path plus a fixed segment.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:rpl'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.port is not None:
return True
if self.role is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl']['meta_info']
# Automatic-protection-switching settings for this instance.
class Aps(object):
"""
Automatic protection switching
.. attribute:: enable
Enable automatic protection switching
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: level
Automatic protection switching level
**type**\: int
**range:** 0..7
.. attribute:: port0
Port0 APS channel in the format of InterfaceName
**type**\: str
.. attribute:: port1
APS channel for ERP port1
**type**\: :py:class:`Port1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.enable = None
self.level = None
# port0 is a plain string leaf, while port1 is a nested container.
self.port0 = None
self.port1 = L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1()
self.port1.parent = self
# APS channel configuration for ERP port1.
class Port1(object):
"""
APS channel for ERP port1
.. attribute:: aps_channel
Port1 APS channel in the format of InterfaceName, BDName or XconnectName
**type**\: str
.. attribute:: aps_type
Port1 APS type
**type**\: :py:class:`ErpapsEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.ErpapsEnum>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.aps_channel = None
self.aps_type = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:port1'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.aps_channel is not None:
return True
if self.aps_type is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1']['meta_info']
# --- trailing members of Aps ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:aps'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# Leaves are checked for non-None; the port1 container is recursed into.
if not self.is_config():
return False
if self.enable is not None:
return True
if self.level is not None:
return True
if self.port0 is not None:
return True
if self.port1 is not None and self.port1._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps']['meta_info']
# --- trailing members of ErpInstance (keyed list entry) ---
@property
def _common_path(self):
# Path requires the parent chain and the erp_instance_id key.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.erp_instance_id is None:
raise YPYModelError('Key property erp_instance_id is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:erp-instance[Cisco-IOS-XR-l2vpn-cfg:erp-instance-id = ' + str(self.erp_instance_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# Leaves checked for non-None; aps/rpl containers recursed into.
if not self.is_config():
return False
if self.erp_instance_id is not None:
return True
if self.aps is not None and self.aps._has_data():
return True
if self.description is not None:
return True
if self.inclusion_list is not None:
return True
if self.profile is not None:
return True
if self.rpl is not None and self.rpl._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info']
# --- trailing members of ErpInstances (list wrapper) ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:erp-instances'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# True when any list entry has data.
if not self.is_config():
return False
if self.erp_instance is not None:
for child_ref in self.erp_instance:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances']['meta_info']
# Container for the ring's port1 entries.
class ErpPort1S(object):
"""
Ethernet ring protection port1
.. attribute:: erp_port1
Ethernet ring protection port1
**type**\: list of :py:class:`ErpPort1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.erp_port1 = YList()
self.erp_port1.parent = self
self.erp_port1.name = 'erp_port1'
# One port1 entry, keyed by erp_port_type.
class ErpPort1(object):
"""
Ethernet ring protection port1
.. attribute:: erp_port_type <key>
Port1 type
**type**\: :py:class:`ErpPortEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.ErpPortEnum>`
.. attribute:: none
none
**type**\: :py:class:`None_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_>`
**presence node**\: True
.. attribute:: virtual_or_interface
virtual or interface
**type**\: list of :py:class:`VirtualOrInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.erp_port_type = None
# Presence container; left None until explicitly created by the caller.
self.none = None
self.virtual_or_interface = YList()
self.virtual_or_interface.parent = self
self.virtual_or_interface.name = 'virtual_or_interface'
# Presence container for the "none" port1 variant.
class None_(object):
"""
none
.. attribute:: monitor
Ethernet ring protection port1 monitor
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# Presence container: existing at all counts as data (see _has_data).
self._is_presence = True
self.monitor = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:none'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
# Presence containers report data as soon as they exist.
if self._is_presence:
return True
if self.monitor is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_']['meta_info']
# Keyed list entry for the "virtual or interface" port1 variant.
class VirtualOrInterface(object):
"""
virtual or interface
.. attribute:: interface_name <key>
Port1 interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: monitor
Ethernet ring protection port1 monitor
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.monitor = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:virtual-or-interface[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.monitor is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface']['meta_info']
# --- trailing members of ErpPort1 ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.erp_port_type is None:
raise YPYModelError('Key property erp_port_type is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:erp-port1[Cisco-IOS-XR-l2vpn-cfg:erp-port-type = ' + str(self.erp_port_type) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.erp_port_type is not None:
return True
if self.none is not None and self.none._has_data():
return True
if self.virtual_or_interface is not None:
for child_ref in self.virtual_or_interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info']
# --- trailing members of ErpPort1S ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:erp-port1s'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.erp_port1 is not None:
for child_ref in self.erp_port1:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S']['meta_info']
# --- trailing members of G8032Ring (keyed list entry) ---
@property
def _common_path(self):
# Top-of-list entry: the path is absolute, so no parent check is needed,
# only the g8032_ring_name key.
if self.g8032_ring_name is None:
raise YPYModelError('Key property g8032_ring_name is None')
return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:g8032-rings/Cisco-IOS-XR-l2vpn-cfg:g8032-ring[Cisco-IOS-XR-l2vpn-cfg:g8032-ring-name = ' + str(self.g8032_ring_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# Leaves checked for non-None; child containers recursed into.
if not self.is_config():
return False
if self.g8032_ring_name is not None:
return True
if self.erp_instances is not None and self.erp_instances._has_data():
return True
if self.erp_port0s is not None and self.erp_port0s._has_data():
return True
if self.erp_port1s is not None and self.erp_port1s._has_data():
return True
if self.erp_provider_bridge is not None:
return True
if self.exclusion_list is not None:
return True
if self.open_ring is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
# --- trailing members of G8032Rings (list wrapper) ---
@property
def _common_path(self):
return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:g8032-rings'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.g8032_ring is not None:
for child_ref in self.g8032_ring:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.G8032Rings']['meta_info']
# L2Vpn.Database.XconnectGroups: YANG list wrapper for xconnect groups.
# (These class bodies continue beyond this region with further nested
# classes and the trailing path/meta methods.)
class XconnectGroups(object):
"""
List of xconnect groups
.. attribute:: xconnect_group
Xconnect group
**type**\: list of :py:class:`XconnectGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.xconnect_group = YList()
self.xconnect_group.parent = self
self.xconnect_group.name = 'xconnect_group'
# One xconnect group, keyed by name.
class XconnectGroup(object):
"""
Xconnect group
.. attribute:: name <key>
Name of the xconnect group
**type**\: str
**length:** 0..32
.. attribute:: mp2mp_xconnects
List of multi point to multi point xconnects
**type**\: :py:class:`Mp2MpXconnects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects>`
.. attribute:: p2p_xconnects
List of point to point xconnects
**type**\: :py:class:`P2PXconnects <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# List key.
self.name = None
self.mp2mp_xconnects = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects()
self.mp2mp_xconnects.parent = self
self.p2p_xconnects = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects()
self.p2p_xconnects.parent = self
# Container for the group's point-to-point xconnect entries.
class P2PXconnects(object):
"""
List of point to point xconnects
.. attribute:: p2p_xconnect
Point to point xconnect
**type**\: list of :py:class:`P2PXconnect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.p2p_xconnect = YList()
self.p2p_xconnect.parent = self
self.p2p_xconnect.name = 'p2p_xconnect'
# One point-to-point xconnect, keyed by name.
class P2PXconnect(object):
"""
Point to point xconnect
.. attribute:: name <key>
Name of the point to point xconnect
**type**\: str
**length:** 0..38
.. attribute:: attachment_circuits
List of attachment circuits
**type**\: :py:class:`AttachmentCircuits <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits>`
.. attribute:: backup_attachment_circuits
List of backup attachment circuits
**type**\: :py:class:`BackupAttachmentCircuits <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits>`
.. attribute:: interworking
Interworking
**type**\: :py:class:`InterworkingEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterworkingEnum>`
.. attribute:: monitor_sessions
List of Monitor session segments
**type**\: :py:class:`MonitorSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions>`
.. attribute:: p2p_description
cross connect description Name
**type**\: str
**length:** 0..64
.. attribute:: pseudowire_evpns
List of EVPN Services
**type**\: :py:class:`PseudowireEvpns <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns>`
.. attribute:: pseudowire_routeds
List of pseudowire\-routed
**type**\: :py:class:`PseudowireRouteds <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds>`
.. attribute:: pseudowires
List of pseudowires
**type**\: :py:class:`Pseudowires <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# List key.
self.name = None
# Child containers instantiated eagerly and back-linked via .parent.
self.attachment_circuits = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits()
self.attachment_circuits.parent = self
self.backup_attachment_circuits = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits()
self.backup_attachment_circuits.parent = self
self.interworking = None
self.monitor_sessions = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions()
self.monitor_sessions.parent = self
self.p2p_description = None
self.pseudowire_evpns = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns()
self.pseudowire_evpns.parent = self
self.pseudowire_routeds = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds()
self.pseudowire_routeds.parent = self
self.pseudowires = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires()
self.pseudowires.parent = self
# Container for the xconnect's backup attachment circuits.
class BackupAttachmentCircuits(object):
"""
List of backup attachment circuits
.. attribute:: backup_attachment_circuit
Backup attachment circuit
**type**\: list of :py:class:`BackupAttachmentCircuit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.backup_attachment_circuit = YList()
self.backup_attachment_circuit.parent = self
self.backup_attachment_circuit.name = 'backup_attachment_circuit'
# One backup attachment circuit, keyed by interface_name.
class BackupAttachmentCircuit(object):
"""
Backup attachment circuit
.. attribute:: interface_name <key>
Name of the attachment circuit interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
@property
def _common_path(self):
# Keyed list entry: needs both a parent and the interface_name key.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:backup-attachment-circuit[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit']['meta_info']
# --- trailing members of BackupAttachmentCircuits ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:backup-attachment-circuits'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# True when any list entry has data.
if not self.is_config():
return False
if self.backup_attachment_circuit is not None:
for child_ref in self.backup_attachment_circuit:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits']['meta_info']
# Container for the xconnect's EVPN P2P services.
class PseudowireEvpns(object):
"""
List of EVPN Services
.. attribute:: pseudowire_evpn
EVPN P2P Service Configuration
**type**\: list of :py:class:`PseudowireEvpn <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.pseudowire_evpn = YList()
self.pseudowire_evpn.parent = self
self.pseudowire_evpn.name = 'pseudowire_evpn'
# One EVPN P2P service, keyed by (eviid, remote_acid, source_acid).
class PseudowireEvpn(object):
"""
EVPN P2P Service Configuration
.. attribute:: eviid <key>
Ethernet VPN ID
**type**\: int
**range:** 1..65534
.. attribute:: remote_acid <key>
Remote AC ID
**type**\: int
**range:** 1..16777215
.. attribute:: source_acid <key>
Source AC ID
**type**\: int
**range:** 1..16777215
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
# All three attributes are list keys.
self.eviid = None
self.remote_acid = None
self.source_acid = None
@property
def _common_path(self):
# Composite-keyed entry: every key must be set before the path exists.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.eviid is None:
raise YPYModelError('Key property eviid is None')
if self.remote_acid is None:
raise YPYModelError('Key property remote_acid is None')
if self.source_acid is None:
raise YPYModelError('Key property source_acid is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowire-evpn[Cisco-IOS-XR-l2vpn-cfg:eviid = ' + str(self.eviid) + '][Cisco-IOS-XR-l2vpn-cfg:remote-acid = ' + str(self.remote_acid) + '][Cisco-IOS-XR-l2vpn-cfg:source-acid = ' + str(self.source_acid) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.eviid is not None:
return True
if self.remote_acid is not None:
return True
if self.source_acid is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn']['meta_info']
# --- trailing members of PseudowireEvpns ---
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowire-evpns'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.pseudowire_evpn is not None:
for child_ref in self.pseudowire_evpn:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns']['meta_info']
class Pseudowires(object):
"""
List of pseudowires
.. attribute:: pseudowire
Pseudowire configuration
**type**\: list of :py:class:`Pseudowire <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the container with an empty keyed pseudowire list."""
    self.parent = None
    # YList wires each appended child back to this node.
    self.pseudowire = YList()
    self.pseudowire.name = 'pseudowire'
    self.pseudowire.parent = self
class Pseudowire(object):
"""
Pseudowire configuration
.. attribute:: pseudowire_id <key>
Pseudowire ID
**type**\: int
**range:** 1..4294967295
.. attribute:: neighbor
keys\: neighbor
**type**\: list of :py:class:`Neighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor>`
.. attribute:: pseudowire_address
keys\: pseudowire\-address
**type**\: list of :py:class:`PseudowireAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Initialize a pseudowire entry: key leaf plus two keyed child lists."""
    self.parent = None
    self.pseudowire_id = None
    # Keyed child lists; YList wires each appended child back to this node.
    self.neighbor = YList()
    self.neighbor.name = 'neighbor'
    self.neighbor.parent = self
    self.pseudowire_address = YList()
    self.pseudowire_address.name = 'pseudowire_address'
    self.pseudowire_address.parent = self
class Neighbor(object):
"""
keys\: neighbor
.. attribute:: neighbor <key>
Pseudowire IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: backup_pseudowires
List of pseudowires
**type**\: :py:class:`BackupPseudowires <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires>`
.. attribute:: bandwidth
Pseudowire Bandwidth
**type**\: int
**range:** 0..4294967295
.. attribute:: class_
Name of the pseudowire class
**type**\: str
**length:** 0..32
.. attribute:: l2tp_static
Pseudowire L2TPv3 static configuration
**type**\: :py:class:`L2TpStatic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic>`
.. attribute:: l2tp_static_attributes
L2TP Static Attributes
**type**\: :py:class:`L2TpStaticAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes>`
.. attribute:: mpls_static_labels
MPLS static labels
**type**\: :py:class:`MplsStaticLabels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels>`
.. attribute:: source_address
Value of the Pseudowire source address. Must be IPv6 only
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: tag_impose
Tag Impose vlan tagged mode
**type**\: int
**range:** 1..4094
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Initialize a neighbor entry: scalar leaves plus four child containers."""
    self.parent = None
    self.neighbor = None
    self.bandwidth = None
    self.class_ = None
    self.source_address = None
    self.tag_impose = None
    # Child containers are instances of this node's own nested classes;
    # alias the long generated path once for readability.
    _neighbor_cls = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor
    self.backup_pseudowires = _neighbor_cls.BackupPseudowires()
    self.backup_pseudowires.parent = self
    self.l2tp_static = _neighbor_cls.L2TpStatic()
    self.l2tp_static.parent = self
    self.l2tp_static_attributes = _neighbor_cls.L2TpStaticAttributes()
    self.l2tp_static_attributes.parent = self
    self.mpls_static_labels = _neighbor_cls.MplsStaticLabels()
    self.mpls_static_labels.parent = self
class MplsStaticLabels(object):
    """MPLS static label pair for the pseudowire.

    .. attribute:: local_static_label
        Pseudowire local static label (int, 16..1048575).
    .. attribute:: remote_static_label
        Pseudowire remote static label (int, 16..1048575).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.local_static_label = None
        self.remote_static_label = None

    @property
    def _common_path(self):
        # Path is always derived from the parent node; no list keys here.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:mpls-static-labels'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return any(label is not None
                   for label in (self.local_static_label, self.remote_static_label))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
        return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels']['meta_info']
class BackupPseudowires(object):
    """Container for the list of backup pseudowires.

    .. attribute:: backup_pseudowire
        Backup pseudowire entries for the cross connect
        (list of :py:class:`BackupPseudowire`).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Keyed child list; YList wires each appended child back to this node.
        self.backup_pseudowire = YList()
        self.backup_pseudowire.name = 'backup_pseudowire'
        self.backup_pseudowire.parent = self

    class BackupPseudowire(object):
        """One backup pseudowire for the cross connect.

        List keys:
        - neighbor: neighbor IPv4 address (str)
        - pseudowire_id: pseudowire ID (int, 1..4294967295)

        .. attribute:: backup_mpls_static_labels
            MPLS static labels container.
        .. attribute:: backup_pw_class
            PW class template name for the backup PW (str, length 0..32).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.neighbor = None
            self.pseudowire_id = None
            self.backup_pw_class = None
            self.backup_mpls_static_labels = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels()
            self.backup_mpls_static_labels.parent = self

        class BackupMplsStaticLabels(object):
            """MPLS static label pair for the backup pseudowire.

            .. attribute:: local_static_label
                Pseudowire local static label (int, 16..1048575).
            .. attribute:: remote_static_label
                Pseudowire remote static label (int, 16..1048575).
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.local_static_label = None
                self.remote_static_label = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                segment = '/Cisco-IOS-XR-l2vpn-cfg:backup-mpls-static-labels'
                return self.parent._common_path + segment

            def is_config(self):
                """Return True: this node models configuration data."""
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return (self.local_static_label is not None
                        or self.remote_static_label is not None)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
                return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels']['meta_info']

        @property
        def _common_path(self):
            # A list entry needs its parent and both keys before a path exists.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.neighbor is None:
                raise YPYModelError('Key property neighbor is None')
            if self.pseudowire_id is None:
                raise YPYModelError('Key property pseudowire_id is None')
            return (self.parent._common_path
                    + '/Cisco-IOS-XR-l2vpn-cfg:backup-pseudowire'
                    + '[Cisco-IOS-XR-l2vpn-cfg:neighbor = ' + str(self.neighbor) + ']'
                    + '[Cisco-IOS-XR-l2vpn-cfg:pseudowire-id = ' + str(self.pseudowire_id) + ']')

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.neighbor is not None or self.pseudowire_id is not None:
                return True
            if self.backup_mpls_static_labels is not None and self.backup_mpls_static_labels._has_data():
                return True
            return self.backup_pw_class is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
            return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:backup-pseudowires'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        children = self.backup_pseudowire
        return children is not None and any(c._has_data() for c in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
        return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires']['meta_info']
class L2TpStaticAttributes(object):
    """L2TP static attributes of the neighbor pseudowire.

    .. attribute:: l2tp_local_cookie
        L2TP local cookie container.
    .. attribute:: l2tp_local_session_id
        L2TP local session ID (int, 1..65535).
    .. attribute:: l2tp_remote_cookie
        L2TP remote cookie container.
    .. attribute:: l2tp_remote_session_id
        L2TP remote session ID (int, 1..65535).
    .. attribute:: l2tp_secondary_local_cookie
        L2TP secondary local cookie container.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.l2tp_local_session_id = None
        self.l2tp_remote_session_id = None
        # Cookie containers are instances of this node's own nested classes;
        # alias the long generated path once for readability.
        _attrs_cls = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes
        self.l2tp_local_cookie = _attrs_cls.L2TpLocalCookie()
        self.l2tp_local_cookie.parent = self
        self.l2tp_remote_cookie = _attrs_cls.L2TpRemoteCookie()
        self.l2tp_remote_cookie.parent = self
        self.l2tp_secondary_local_cookie = _attrs_cls.L2TpSecondaryLocalCookie()
        self.l2tp_secondary_local_cookie.parent = self

    class L2TpRemoteCookie(object):
        """L2TP remote cookie.

        .. attribute:: higher_value
            Higher remote cookie value (int, 0..4294967295).
        .. attribute:: lower_value
            Lower remote cookie value (int, 0..4294967295).
        .. attribute:: size
            Remote cookie size (L2TpCookieSizeEnum).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.higher_value = None
            self.lower_value = None
            self.size = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:l2tp-remote-cookie'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None
                       for v in (self.higher_value, self.lower_value, self.size))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
            return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info']

    class L2TpSecondaryLocalCookie(object):
        """L2TP secondary local cookie.

        .. attribute:: higher_value
            Higher local cookie value (int, 0..4294967295).
        .. attribute:: lower_value
            Lower local cookie value (int, 0..4294967295).
        .. attribute:: size
            Local cookie size (L2TpCookieSizeEnum).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.higher_value = None
            self.lower_value = None
            self.size = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:l2tp-secondary-local-cookie'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None
                       for v in (self.higher_value, self.lower_value, self.size))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
            return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info']

    class L2TpLocalCookie(object):
        """L2TP local cookie.

        .. attribute:: higher_value
            Higher local cookie value (int, 0..4294967295).
        .. attribute:: lower_value
            Lower local cookie value (int, 0..4294967295).
        .. attribute:: size
            Local cookie size (L2TpCookieSizeEnum).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.higher_value = None
            self.lower_value = None
            self.size = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:l2tp-local-cookie'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None
                       for v in (self.higher_value, self.lower_value, self.size))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
            return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:l2tp-static-attributes'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.l2tp_local_session_id is not None or self.l2tp_remote_session_id is not None:
            return True
        for cookie in (self.l2tp_local_cookie,
                       self.l2tp_remote_cookie,
                       self.l2tp_secondary_local_cookie):
            if cookie is not None and cookie._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
        return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
class L2TpStatic(object):
    """Pseudowire L2TPv3 static configuration.

    .. attribute:: enable
        Presence leaf (Empty) that enables L2TPv3 static configuration.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.enable = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        segment = '/Cisco-IOS-XR-l2vpn-cfg:l2tp-static'
        return self.parent._common_path + segment

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return self.enable is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
        return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.neighbor is None:
raise YPYModelError('Key property neighbor is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:neighbor[Cisco-IOS-XR-l2vpn-cfg:neighbor = ' + str(self.neighbor) + ']'
def is_config(self):
    """Return True: this node models configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.neighbor is not None:
return True
if self.backup_pseudowires is not None and self.backup_pseudowires._has_data():
return True
if self.bandwidth is not None:
return True
if self.class_ is not None:
return True
if self.l2tp_static is not None and self.l2tp_static._has_data():
return True
if self.l2tp_static_attributes is not None and self.l2tp_static_attributes._has_data():
return True
if self.mpls_static_labels is not None and self.mpls_static_labels._has_data():
return True
if self.source_address is not None:
return True
if self.tag_impose is not None:
return True
return False
@staticmethod
def _meta_info():
    """Return the generated meta-information record for this Neighbor class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
    table_key = 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor'
    return meta_module._meta_table[table_key]['meta_info']
class PseudowireAddress(object):
"""
keys\: pseudowire\-address
.. attribute:: pseudowire_address <key>
Pseudowire IPv6 address. A pseudowire can have only one address\: IPv4 or IPv6
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: backup_pseudowires
List of pseudowires
**type**\: :py:class:`BackupPseudowires <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires>`
.. attribute:: bandwidth
Pseudowire Bandwidth
**type**\: int
**range:** 0..4294967295
.. attribute:: class_
Name of the pseudowire class
**type**\: str
**length:** 0..32
.. attribute:: l2tp_static
Pseudowire L2TPv3 static configuration
**type**\: :py:class:`L2TpStatic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic>`
.. attribute:: l2tp_static_attributes
L2TP Static Attributes
**type**\: :py:class:`L2TpStaticAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes>`
.. attribute:: mpls_static_labels
MPLS static labels
**type**\: :py:class:`MplsStaticLabels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels>`
.. attribute:: source_address
Value of the Pseudowire source address. Must be IPv6 only
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: tag_impose
Tag Impose vlan tagged mode
**type**\: int
**range:** 1..4094
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Initialize a pseudowire-address entry: scalar leaves plus four child containers."""
    self.parent = None
    self.pseudowire_address = None
    self.bandwidth = None
    self.class_ = None
    self.source_address = None
    self.tag_impose = None
    # Child containers are instances of this node's own nested classes;
    # alias the long generated path once for readability.
    _pw_addr_cls = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress
    self.backup_pseudowires = _pw_addr_cls.BackupPseudowires()
    self.backup_pseudowires.parent = self
    self.l2tp_static = _pw_addr_cls.L2TpStatic()
    self.l2tp_static.parent = self
    self.l2tp_static_attributes = _pw_addr_cls.L2TpStaticAttributes()
    self.l2tp_static_attributes.parent = self
    self.mpls_static_labels = _pw_addr_cls.MplsStaticLabels()
    self.mpls_static_labels.parent = self
class MplsStaticLabels(object):
    """MPLS static label pair for the pseudowire (IPv6-address variant).

    .. attribute:: local_static_label
        Pseudowire local static label (int, 16..1048575).
    .. attribute:: remote_static_label
        Pseudowire remote static label (int, 16..1048575).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.local_static_label = None
        self.remote_static_label = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        segment = '/Cisco-IOS-XR-l2vpn-cfg:mpls-static-labels'
        return self.parent._common_path + segment

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return (self.local_static_label is not None
                or self.remote_static_label is not None)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
        return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels']['meta_info']
class BackupPseudowires(object):
    """Container for the list of backup pseudowires (IPv6-address variant).

    .. attribute:: backup_pseudowire
        Backup pseudowire entries for the cross connect
        (list of :py:class:`BackupPseudowire`).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Keyed child list; YList wires each appended child back to this node.
        self.backup_pseudowire = YList()
        self.backup_pseudowire.name = 'backup_pseudowire'
        self.backup_pseudowire.parent = self

    class BackupPseudowire(object):
        """One backup pseudowire for the cross connect.

        List keys:
        - neighbor: neighbor IPv4 address (str)
        - pseudowire_id: pseudowire ID (int, 1..4294967295)

        .. attribute:: backup_mpls_static_labels
            MPLS static labels container.
        .. attribute:: backup_pw_class
            PW class template name for the backup PW (str, length 0..32).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.neighbor = None
            self.pseudowire_id = None
            self.backup_pw_class = None
            self.backup_mpls_static_labels = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels()
            self.backup_mpls_static_labels.parent = self

        class BackupMplsStaticLabels(object):
            """MPLS static label pair for the backup pseudowire.

            .. attribute:: local_static_label
                Pseudowire local static label (int, 16..1048575).
            .. attribute:: remote_static_label
                Pseudowire remote static label (int, 16..1048575).
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.local_static_label = None
                self.remote_static_label = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                segment = '/Cisco-IOS-XR-l2vpn-cfg:backup-mpls-static-labels'
                return self.parent._common_path + segment

            def is_config(self):
                """Return True: this node models configuration data."""
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return (self.local_static_label is not None
                        or self.remote_static_label is not None)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
                return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels']['meta_info']

        @property
        def _common_path(self):
            # A list entry needs its parent and both keys before a path exists.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.neighbor is None:
                raise YPYModelError('Key property neighbor is None')
            if self.pseudowire_id is None:
                raise YPYModelError('Key property pseudowire_id is None')
            return (self.parent._common_path
                    + '/Cisco-IOS-XR-l2vpn-cfg:backup-pseudowire'
                    + '[Cisco-IOS-XR-l2vpn-cfg:neighbor = ' + str(self.neighbor) + ']'
                    + '[Cisco-IOS-XR-l2vpn-cfg:pseudowire-id = ' + str(self.pseudowire_id) + ']')

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.neighbor is not None or self.pseudowire_id is not None:
                return True
            if self.backup_mpls_static_labels is not None and self.backup_mpls_static_labels._has_data():
                return True
            return self.backup_pw_class is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
            return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:backup-pseudowires'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        children = self.backup_pseudowire
        return children is not None and any(c._has_data() for c in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_module
        return meta_module._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires']['meta_info']
class L2TpStaticAttributes(object):
    """
    L2TP Static Attributes

    .. attribute:: l2tp_local_cookie
    L2TP local cookie
    **type**\: :py:class:`L2TpLocalCookie <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie>`
    .. attribute:: l2tp_local_session_id
    L2TP local session ID
    **type**\: int
    **range:** 1..65535
    .. attribute:: l2tp_remote_cookie
    L2TP remote cookie
    **type**\: :py:class:`L2TpRemoteCookie <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie>`
    .. attribute:: l2tp_remote_session_id
    L2TP remote session ID
    **type**\: int
    **range:** 1..65535
    .. attribute:: l2tp_secondary_local_cookie
    L2TP secondary local cookie
    **type**\: :py:class:`L2TpSecondaryLocalCookie <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie>`
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child containers are created eagerly and re-parented to this node.
        self.l2tp_local_cookie = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie()
        self.l2tp_local_cookie.parent = self
        self.l2tp_local_session_id = None
        self.l2tp_remote_cookie = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie()
        self.l2tp_remote_cookie.parent = self
        self.l2tp_remote_session_id = None
        self.l2tp_secondary_local_cookie = L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie()
        self.l2tp_secondary_local_cookie.parent = self

    class L2TpRemoteCookie(object):
        """
        L2TP remote cookie

        .. attribute:: higher_value
        Higher remote cookie value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: lower_value
        Lower remote cookie value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: size
        Remote cookie size
        **type**\: :py:class:`L2TpCookieSizeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2TpCookieSizeEnum>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.higher_value = None
            self.lower_value = None
            self.size = None

        @property
        def _common_path(self):
            # Path is derived from the parent; an orphan node has no path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:l2tp-remote-cookie'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # Any non-None leaf means this container carries data.
            if not self.is_config():
                return False
            if self.higher_value is not None:
                return True
            if self.lower_value is not None:
                return True
            if self.size is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info']

    class L2TpSecondaryLocalCookie(object):
        """
        L2TP secondary local cookie

        .. attribute:: higher_value
        Higher local cookie value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: lower_value
        Lower local cookie value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: size
        Local cookie size
        **type**\: :py:class:`L2TpCookieSizeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2TpCookieSizeEnum>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.higher_value = None
            self.lower_value = None
            self.size = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:l2tp-secondary-local-cookie'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.higher_value is not None:
                return True
            if self.lower_value is not None:
                return True
            if self.size is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info']

    class L2TpLocalCookie(object):
        """
        L2TP local cookie

        .. attribute:: higher_value
        Higher local cookie value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: lower_value
        Lower local cookie value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: size
        Local cookie size
        **type**\: :py:class:`L2TpCookieSizeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2TpCookieSizeEnum>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.higher_value = None
            self.lower_value = None
            self.size = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:l2tp-local-cookie'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.higher_value is not None:
                return True
            if self.lower_value is not None:
                return True
            if self.size is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:l2tp-static-attributes'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Leaves are checked directly; child containers via their _has_data().
        if not self.is_config():
            return False
        if self.l2tp_local_cookie is not None and self.l2tp_local_cookie._has_data():
            return True
        if self.l2tp_local_session_id is not None:
            return True
        if self.l2tp_remote_cookie is not None and self.l2tp_remote_cookie._has_data():
            return True
        if self.l2tp_remote_session_id is not None:
            return True
        if self.l2tp_secondary_local_cookie is not None and self.l2tp_secondary_local_cookie._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
class L2TpStatic(object):
    """
    Pseudowire L2TPv3 static configuration.

    .. attribute:: enable
    Enable pseudowire L2TPv3 static configuration
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.enable = None

    @property
    def _common_path(self):
        """Absolute XML/YANG path of this node (requires a parent)."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:l2tp-static'

    def is_config(self):
        """Return True: this node always represents configuration data."""
        return True

    def _has_data(self):
        """Return True when the single 'enable' leaf is set."""
        if not self.is_config():
            return False
        return self.enable is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.pseudowire_address is None:
raise YPYModelError('Key property pseudowire_address is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowire-address[Cisco-IOS-XR-l2vpn-cfg:pseudowire-address = ' + str(self.pseudowire_address) + ']'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.pseudowire_address is not None:
return True
if self.backup_pseudowires is not None and self.backup_pseudowires._has_data():
return True
if self.bandwidth is not None:
return True
if self.class_ is not None:
return True
if self.l2tp_static is not None and self.l2tp_static._has_data():
return True
if self.l2tp_static_attributes is not None and self.l2tp_static_attributes._has_data():
return True
if self.mpls_static_labels is not None and self.mpls_static_labels._has_data():
return True
if self.source_address is not None:
return True
if self.tag_impose is not None:
return True
return False
@staticmethod
def _meta_info():
    # Look up the generated metadata record for this list entry in the
    # model-wide meta table (keyed by the full dotted class path).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.pseudowire_id is None:
raise YPYModelError('Key property pseudowire_id is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowire[Cisco-IOS-XR-l2vpn-cfg:pseudowire-id = ' + str(self.pseudowire_id) + ']'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.pseudowire_id is not None:
return True
if self.neighbor is not None:
for child_ref in self.neighbor:
if child_ref._has_data():
return True
if self.pseudowire_address is not None:
for child_ref in self.pseudowire_address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Look up the generated metadata record for this list entry in the
    # model-wide meta table (keyed by the full dotted class path).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowires'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.pseudowire is not None:
for child_ref in self.pseudowire:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Look up the generated metadata record for this container in the
    # model-wide meta table (keyed by the full dotted class path).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info']
class MonitorSessions(object):
    """
    List of Monitor session segments

    .. attribute:: monitor_session
    Monitor session segment
    **type**\: list of :py:class:`MonitorSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession>`
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YList of MonitorSession entries; parent/name wired for YDK bookkeeping.
        self.monitor_session = YList()
        self.monitor_session.parent = self
        self.monitor_session.name = 'monitor_session'

    class MonitorSession(object):
        """
        Monitor session segment

        .. attribute:: name <key>
        Name of the monitor session
        **type**\: str
        **length:** 0..64
        .. attribute:: enable
        Enable monitor session segment
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.name = None
            self.enable = None

        @property
        def _common_path(self):
            # Path requires both a parent and the 'name' list key.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.name is None:
                raise YPYModelError('Key property name is None')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:monitor-session[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.name is not None:
                return True
            if self.enable is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:monitor-sessions'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any list entry carries data.
        if not self.is_config():
            return False
        if self.monitor_session is not None:
            for child_ref in self.monitor_session:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info']
class PseudowireRouteds(object):
    """
    List of pseudowire\-routed

    .. attribute:: pseudowire_routed
    Pseudowire configuration
    **type**\: list of :py:class:`PseudowireRouted <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted>`
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YList of PseudowireRouted entries; parent/name wired for YDK bookkeeping.
        self.pseudowire_routed = YList()
        self.pseudowire_routed.parent = self
        self.pseudowire_routed.name = 'pseudowire_routed'

    class PseudowireRouted(object):
        """
        Pseudowire configuration

        .. attribute:: acid <key>
        Target AC ID
        **type**\: int
        **range:** 1..4294967295
        .. attribute:: global_id <key>
        Target Global ID
        **type**\: int
        **range:** 1..4294967295
        .. attribute:: prefix <key>
        Target Prefix
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: sacid <key>
        Source AC ID
        **type**\: int
        **range:** 1..4294967295
        .. attribute:: class_
        Name of the pseudowire class
        **type**\: str
        **length:** 0..32
        .. attribute:: tag_impose
        Tag Impose vlan tagged mode
        **type**\: int
        **range:** 1..4094
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.acid = None
            self.global_id = None
            self.prefix = None
            self.sacid = None
            self.class_ = None
            self.tag_impose = None

        @property
        def _common_path(self):
            # Path requires a parent and all four list keys.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.acid is None:
                raise YPYModelError('Key property acid is None')
            if self.global_id is None:
                raise YPYModelError('Key property global_id is None')
            if self.prefix is None:
                raise YPYModelError('Key property prefix is None')
            if self.sacid is None:
                raise YPYModelError('Key property sacid is None')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowire-routed[Cisco-IOS-XR-l2vpn-cfg:acid = ' + str(self.acid) + '][Cisco-IOS-XR-l2vpn-cfg:global-id = ' + str(self.global_id) + '][Cisco-IOS-XR-l2vpn-cfg:prefix = ' + str(self.prefix) + '][Cisco-IOS-XR-l2vpn-cfg:sacid = ' + str(self.sacid) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.acid is not None:
                return True
            if self.global_id is not None:
                return True
            if self.prefix is not None:
                return True
            if self.sacid is not None:
                return True
            if self.class_ is not None:
                return True
            if self.tag_impose is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pseudowire-routeds'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any list entry carries data.
        if not self.is_config():
            return False
        if self.pseudowire_routed is not None:
            for child_ref in self.pseudowire_routed:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info']
class AttachmentCircuits(object):
    """
    List of attachment circuits

    .. attribute:: attachment_circuit
    Attachment circuit interface
    **type**\: list of :py:class:`AttachmentCircuit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit>`
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YList of AttachmentCircuit entries; parent/name wired for YDK bookkeeping.
        self.attachment_circuit = YList()
        self.attachment_circuit.parent = self
        self.attachment_circuit.name = 'attachment_circuit'

    class AttachmentCircuit(object):
        """
        Attachment circuit interface

        .. attribute:: name <key>
        Name of the attachment circuit interface
        **type**\: str
        **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
        .. attribute:: enable
        Enable attachment circuit interface
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.name = None
            self.enable = None

        @property
        def _common_path(self):
            # Path requires both a parent and the 'name' list key.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.name is None:
                raise YPYModelError('Key property name is None')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:attachment-circuit[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.name is not None:
                return True
            if self.enable is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:attachment-circuits'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any list entry carries data.
        if not self.is_config():
            return False
        if self.attachment_circuit is not None:
            for child_ref in self.attachment_circuit:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.name is None:
raise YPYModelError('Key property name is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:p2p-xconnect[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
if self.attachment_circuits is not None and self.attachment_circuits._has_data():
return True
if self.backup_attachment_circuits is not None and self.backup_attachment_circuits._has_data():
return True
if self.interworking is not None:
return True
if self.monitor_sessions is not None and self.monitor_sessions._has_data():
return True
if self.p2p_description is not None:
return True
if self.pseudowire_evpns is not None and self.pseudowire_evpns._has_data():
return True
if self.pseudowire_routeds is not None and self.pseudowire_routeds._has_data():
return True
if self.pseudowires is not None and self.pseudowires._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Look up the generated metadata record for this list entry in the
    # model-wide meta table (keyed by the full dotted class path).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:p2p-xconnects'
def is_config(self):
    """Return True: this node always represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.p2p_xconnect is not None:
for child_ref in self.p2p_xconnect:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Look up the generated metadata record for this container in the
    # model-wide meta table (keyed by the full dotted class path).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info']
class Mp2MpXconnects(object):
"""
List of multi point to multi point xconnects
.. attribute:: mp2mp_xconnect
Multi point to multi point xconnect
**type**\: list of :py:class:`Mp2MpXconnect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # YList of Mp2MpXconnect entries; parent/name are wired up so YDK can
    # track ownership and the attribute name of the list.
    self.mp2mp_xconnect = YList()
    self.mp2mp_xconnect.parent = self
    self.mp2mp_xconnect.name = 'mp2mp_xconnect'
class Mp2MpXconnect(object):
"""
Multi point to multi point xconnect
.. attribute:: name <key>
Name of the multi point to multi point xconnect
**type**\: str
**length:** 0..26
.. attribute:: mp2mp_auto_discovery
auto\-discovery in this MP2MP
**type**\: :py:class:`Mp2MpAutoDiscovery <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery>`
.. attribute:: mp2mp_control_word
Disable control word
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: mp2mp_interworking
Interworking
**type**\: :py:class:`InterworkingEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterworkingEnum>`
.. attribute:: mp2mp_shutdown
shutdown this MP2MP VPWS instance
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: mp2mpl2_encapsulation
Configure Layer 2 Encapsulation
**type**\: :py:class:`L2EncapsulationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2EncapsulationEnum>`
.. attribute:: mp2mpmtu
Maximum transmission unit for this MP2MP VPWS instance
**type**\: int
**range:** 64..65535
**units**\: byte
.. attribute:: mp2mpvpn_id
VPN Identifier
**type**\: int
**range:** 1..4294967295
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    self.name = None
    # Child container is instantiated eagerly and re-parented to this node.
    self.mp2mp_auto_discovery = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery()
    self.mp2mp_auto_discovery.parent = self
    # Scalar leaves default to None (unset).
    self.mp2mp_control_word = None
    self.mp2mp_interworking = None
    self.mp2mp_shutdown = None
    self.mp2mpl2_encapsulation = None
    self.mp2mpmtu = None
    self.mp2mpvpn_id = None
class Mp2MpAutoDiscovery(object):
"""
auto\-discovery in this MP2MP
.. attribute:: enable
Enable auto\-discovery
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: mp2mp_route_policy
Route policy
**type**\: :py:class:`Mp2MpRoutePolicy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy>`
.. attribute:: mp2mp_route_targets
Route Target
**type**\: :py:class:`Mp2MpRouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets>`
.. attribute:: mp2mp_signaling_protocol
signaling protocol in this MP2MP
**type**\: :py:class:`Mp2MpSignalingProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol>`
.. attribute:: route_distinguisher
Route Distinguisher
**type**\: :py:class:`RouteDistinguisher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    self.enable = None
    # Child containers are instantiated eagerly and re-parented to this node.
    self.mp2mp_route_policy = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy()
    self.mp2mp_route_policy.parent = self
    self.mp2mp_route_targets = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets()
    self.mp2mp_route_targets.parent = self
    self.mp2mp_signaling_protocol = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol()
    self.mp2mp_signaling_protocol.parent = self
    self.route_distinguisher = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher()
    self.route_distinguisher.parent = self
class RouteDistinguisher(object):
    """
    Route Distinguisher.

    .. attribute:: addr_index
    Addr index
    **type**\: int
    **range:** 0..65535
    .. attribute:: address
    IPV4 address
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: as_
    Two byte or 4 byte AS number
    **type**\: int
    **range:** 1..4294967295
    .. attribute:: as_index
    AS\:nn (hex or decimal format)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: type
    Router distinguisher type
    **type**\: :py:class:`BgpRouteDistinguisherEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteDistinguisherEnum>`
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.addr_index = None
        self.address = None
        self.as_ = None
        self.as_index = None
        self.type = None

    @property
    def _common_path(self):
        """Absolute XML/YANG path of this node (requires a parent)."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:route-distinguisher'

    def is_config(self):
        """Return True: this node always represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of the route distinguisher is set."""
        if not self.is_config():
            return False
        leaves = (self.addr_index, self.address, self.as_, self.as_index, self.type)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher']['meta_info']
class Mp2MpRoutePolicy(object):
    """
    Route policy.

    .. attribute:: export
    Export route policy
    **type**\: str
    .. attribute:: import_
    Import route policy
    **type**\: str
    """

    # YANG module prefix/revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.export = None
        self.import_ = None

    @property
    def _common_path(self):
        """Absolute XML/YANG path of this node (requires a parent)."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:mp2mp-route-policy'

    def is_config(self):
        """Return True: this node always represents configuration data."""
        return True

    def _has_data(self):
        """Return True when either the export or import policy is set."""
        if not self.is_config():
            return False
        return self.export is not None or self.import_ is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy']['meta_info']
class Mp2MpRouteTargets(object):
"""
Route Target
.. attribute:: mp2mp_route_target
Name of the Route Target
**type**\: list of :py:class:`Mp2MpRouteTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # YList of Mp2MpRouteTarget entries; parent/name are wired up so YDK can
    # track ownership and the attribute name of the list.
    self.mp2mp_route_target = YList()
    self.mp2mp_route_target.parent = self
    self.mp2mp_route_target.name = 'mp2mp_route_target'
class Mp2MpRouteTarget(object):
"""
Name of the Route Target
.. attribute:: format <key>
Format of the route target
**type**\: :py:class:`BgpRouteTargetFormatEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetFormatEnum>`
.. attribute:: role <key>
Role of the router target type
**type**\: :py:class:`BgpRouteTargetRoleEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetRoleEnum>`
.. attribute:: ipv4_address
ipv4 address
**type**\: list of :py:class:`Ipv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address>`
.. attribute:: two_byte_as_or_four_byte_as
two byte as or four byte as
**type**\: list of :py:class:`TwoByteAsOrFourByteAs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.format = None
self.role = None
self.ipv4_address = YList()
self.ipv4_address.parent = self
self.ipv4_address.name = 'ipv4_address'
self.two_byte_as_or_four_byte_as = YList()
self.two_byte_as_or_four_byte_as.parent = self
self.two_byte_as_or_four_byte_as.name = 'two_byte_as_or_four_byte_as'
class TwoByteAsOrFourByteAs(object):
"""
two byte as or four byte as
.. attribute:: as_ <key>
Two byte or 4 byte AS number
**type**\: int
**range:** 1..4294967295
.. attribute:: as_index <key>
AS\:nn (hex or decimal format)
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.as_ = None
self.as_index = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.as_ is None:
raise YPYModelError('Key property as_ is None')
if self.as_index is None:
raise YPYModelError('Key property as_index is None')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:two-byte-as-or-four-byte-as[Cisco-IOS-XR-l2vpn-cfg:as = ' + str(self.as_) + '][Cisco-IOS-XR-l2vpn-cfg:as-index = ' + str(self.as_index) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.as_ is not None:
return True
if self.as_index is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs']['meta_info']
class Ipv4Address(object):
    """
    IPv4 address list entry of an MP2MP route target.

    .. attribute:: addr_index <key>
        Addr index
        **type**\: int
        **range:** 0..65535

    .. attribute:: address <key>
        IPV4 address
        **type**\: str (dotted-quad IPv4, optional zone suffix)
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Both list keys start unset; the owning container assigns parent.
        self.parent = None
        self.addr_index = None
        self.address = None

    @property
    def _common_path(self):
        # Derive the entry's XPath from the parent container path plus the two keys.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.addr_index is None:
            raise YPYModelError('Key property addr_index is None')
        if self.address is None:
            raise YPYModelError('Key property address is None')
        suffix = '/Cisco-IOS-XR-l2vpn-cfg:ipv4-address'
        suffix += '[Cisco-IOS-XR-l2vpn-cfg:addr-index = ' + str(self.addr_index) + ']'
        suffix += '[Cisco-IOS-XR-l2vpn-cfg:address = ' + str(self.address) + ']'
        return self.parent._common_path + suffix

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Populated as soon as either key leaf has been assigned.
        if not self.is_config():
            return False
        return self.addr_index is not None or self.address is not None

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address']['meta_info']
# Trailing accessors of the mp2mp-route-target list entry (keys: format, role).
@property
def _common_path(self):
    # XPath of this route-target entry; both keys must be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.format is None:
        raise YPYModelError('Key property format is None')
    if self.role is None:
        raise YPYModelError('Key property role is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mp2mp-route-target[Cisco-IOS-XR-l2vpn-cfg:format = ' + str(self.format) + '][Cisco-IOS-XR-l2vpn-cfg:role = ' + str(self.role) + ']'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when a key leaf is set or any child list entry carries data.
    if not self.is_config():
        return False
    if self.format is not None:
        return True
    if self.role is not None:
        return True
    if self.ipv4_address is not None:
        for child_ref in self.ipv4_address:
            if child_ref._has_data():
                return True
    if self.two_byte_as_or_four_byte_as is not None:
        for child_ref in self.two_byte_as_or_four_byte_as:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
# Trailing accessors of the mp2mp-route-targets container.
@property
def _common_path(self):
    # Container path appended to the owning auto-discovery node's path.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mp2mp-route-targets'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # The container has data when any child route-target entry does.
    if not self.is_config():
        return False
    if self.mp2mp_route_target is not None:
        for child_ref in self.mp2mp_route_target:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info']
class Mp2MpSignalingProtocol(object):
    """
    signaling protocol in this MP2MP

    .. attribute:: ce_range
        Local Customer Edge Identifier
        **type**\: int
        **range:** 11..100

    .. attribute:: ceids
        Local Customer Edge Identifier Table
        **type**\: :py:class:`Ceids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids>`

    .. attribute:: enable
        Enable signaling protocol
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: flow_label_load_balance
        Enable Flow Label based load balancing
        **type**\: :py:class:`FlowLabelLoadBalance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance>`
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Instantiate child containers eagerly and wire their parent pointers;
        # scalar leaves start unset (None).
        self.parent = None
        self.ce_range = None
        self.ceids = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids()
        self.ceids.parent = self
        self.enable = None
        self.flow_label_load_balance = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance()
        self.flow_label_load_balance.parent = self

    class FlowLabelLoadBalance(object):
        """
        Enable Flow Label based load balancing

        .. attribute:: flow_label
            Flow Label load balance type
            **type**\: :py:class:`FlowLabelLoadBalanceEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.FlowLabelLoadBalanceEnum>`

        .. attribute:: static
            Static Flow Label
            **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.flow_label = None
            self.static = None

        @property
        def _common_path(self):
            # Container path appended to the owning signaling-protocol path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:flow-label-load-balance'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when either leaf has been assigned.
            if not self.is_config():
                return False
            if self.flow_label is not None:
                return True
            if self.static is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Imported lazily to avoid a circular import with the generated meta module.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance']['meta_info']

    class Ceids(object):
        """
        Local Customer Edge Identifier Table

        .. attribute:: ceid
            Local Customer Edge Identifier
            **type**\: list of :py:class:`Ceid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            # Keyed child list; YList records its owner and YANG node name.
            self.parent = None
            self.ceid = YList()
            self.ceid.parent = self
            self.ceid.name = 'ceid'

        class Ceid(object):
            """
            Local Customer Edge Identifier

            .. attribute:: ce_id <key>
                Local Customer Edge Identifier
                **type**\: int
                **range:** 1..16384

            .. attribute:: remote_ceid_attachment_circuits
                AC And Remote Customer Edge Identifier Table
                **type**\: :py:class:`RemoteCeidAttachmentCircuits <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits>`
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.ce_id = None
                self.remote_ceid_attachment_circuits = L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits()
                self.remote_ceid_attachment_circuits.parent = self

            class RemoteCeidAttachmentCircuits(object):
                """
                AC And Remote Customer Edge Identifier Table

                .. attribute:: remote_ceid_attachment_circuit
                    AC And Remote Customer Edge Identifier
                    **type**\: list of :py:class:`RemoteCeidAttachmentCircuit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    # Keyed child list; YList records its owner and YANG node name.
                    self.parent = None
                    self.remote_ceid_attachment_circuit = YList()
                    self.remote_ceid_attachment_circuit.parent = self
                    self.remote_ceid_attachment_circuit.name = 'remote_ceid_attachment_circuit'

                class RemoteCeidAttachmentCircuit(object):
                    """
                    AC And Remote Customer Edge Identifier

                    .. attribute:: name <key>
                        The name of the Attachment Circuit
                        **type**\: str
                        **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)

                    .. attribute:: remote_ce_id <key>
                        Remote Customer Edge Identifier
                        **type**\: int
                        **range:** 1..16384
                    """

                    _prefix = 'l2vpn-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.name = None
                        self.remote_ce_id = None

                    @property
                    def _common_path(self):
                        # XPath of this list entry; both keys must be set.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.name is None:
                            raise YPYModelError('Key property name is None')
                        if self.remote_ce_id is None:
                            raise YPYModelError('Key property remote_ce_id is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:remote-ceid-attachment-circuit[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + '][Cisco-IOS-XR-l2vpn-cfg:remote-ce-id = ' + str(self.remote_ce_id) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        # Populated when either key leaf has been assigned.
                        if not self.is_config():
                            return False
                        if self.name is not None:
                            return True
                        if self.remote_ce_id is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        # Imported lazily to avoid a circular import with the generated meta module.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit']['meta_info']

                @property
                def _common_path(self):
                    # Container path appended to the owning ceid entry's path.
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:remote-ceid-attachment-circuits'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    # The container has data when any child entry does.
                    if not self.is_config():
                        return False
                    if self.remote_ceid_attachment_circuit is not None:
                        for child_ref in self.remote_ceid_attachment_circuit:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    # Imported lazily to avoid a circular import with the generated meta module.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info']

            @property
            def _common_path(self):
                # XPath of this ceid entry; key ce_id must be set.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.ce_id is None:
                    raise YPYModelError('Key property ce_id is None')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:ceid[Cisco-IOS-XR-l2vpn-cfg:ce-id = ' + str(self.ce_id) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # True when the key leaf is set or the child container carries data.
                if not self.is_config():
                    return False
                if self.ce_id is not None:
                    return True
                if self.remote_ceid_attachment_circuits is not None and self.remote_ceid_attachment_circuits._has_data():
                    return True
                return False

            @staticmethod
            def _meta_info():
                # Imported lazily to avoid a circular import with the generated meta module.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info']

        @property
        def _common_path(self):
            # Container path appended to the owning signaling-protocol path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:ceids'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # The container has data when any child ceid entry does.
            if not self.is_config():
                return False
            if self.ceid is not None:
                for child_ref in self.ceid:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            # Imported lazily to avoid a circular import with the generated meta module.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info']

    @property
    def _common_path(self):
        # Container path appended to the owning auto-discovery node's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mp2mp-signaling-protocol'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf is set or any child container carries data.
        if not self.is_config():
            return False
        if self.ce_range is not None:
            return True
        if self.ceids is not None and self.ceids._has_data():
            return True
        if self.enable is not None:
            return True
        if self.flow_label_load_balance is not None and self.flow_label_load_balance._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
# Trailing accessors of the mp2mp-auto-discovery container.
@property
def _common_path(self):
    # Container path appended to the owning mp2mp-xconnect entry's path.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mp2mp-auto-discovery'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when the enable leaf is set or any child container carries data.
    if not self.is_config():
        return False
    if self.enable is not None:
        return True
    if self.mp2mp_route_policy is not None and self.mp2mp_route_policy._has_data():
        return True
    if self.mp2mp_route_targets is not None and self.mp2mp_route_targets._has_data():
        return True
    if self.mp2mp_signaling_protocol is not None and self.mp2mp_signaling_protocol._has_data():
        return True
    if self.route_distinguisher is not None and self.route_distinguisher._has_data():
        return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
# Trailing accessors of the mp2mp-xconnect list entry (key: name).
@property
def _common_path(self):
    # XPath of this xconnect entry; key 'name' must be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.name is None:
        raise YPYModelError('Key property name is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mp2mp-xconnect[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when the key, any scalar leaf, or the auto-discovery child carries data.
    if not self.is_config():
        return False
    if self.name is not None:
        return True
    if self.mp2mp_auto_discovery is not None and self.mp2mp_auto_discovery._has_data():
        return True
    if self.mp2mp_control_word is not None:
        return True
    if self.mp2mp_interworking is not None:
        return True
    if self.mp2mp_shutdown is not None:
        return True
    if self.mp2mpl2_encapsulation is not None:
        return True
    if self.mp2mpmtu is not None:
        return True
    if self.mp2mpvpn_id is not None:
        return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info']
# Trailing accessors of the mp2mp-xconnects container.
@property
def _common_path(self):
    # Container path appended to the owning xconnect-group entry's path.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mp2mp-xconnects'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # The container has data when any child xconnect entry does.
    if not self.is_config():
        return False
    if self.mp2mp_xconnect is not None:
        for child_ref in self.mp2mp_xconnect:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info']
# Trailing accessors of the xconnect-group list entry (key: name).
@property
def _common_path(self):
    # Absolute XPath: this list hangs off a fixed location, so no parent
    # lookup is needed — only the key must be set.
    if self.name is None:
        raise YPYModelError('Key property name is None')
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:xconnect-groups/Cisco-IOS-XR-l2vpn-cfg:xconnect-group[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when the key is set or either child container carries data.
    if not self.is_config():
        return False
    if self.name is not None:
        return True
    if self.mp2mp_xconnects is not None and self.mp2mp_xconnects._has_data():
        return True
    if self.p2p_xconnects is not None and self.p2p_xconnects._has_data():
        return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
# Trailing accessors of the xconnect-groups container.
@property
def _common_path(self):
    # Absolute XPath — this container lives at a fixed location in the model.
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:xconnect-groups'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # The container has data when any child group entry does.
    if not self.is_config():
        return False
    if self.xconnect_group is not None:
        for child_ref in self.xconnect_group:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.XconnectGroups']['meta_info']
class BridgeDomainGroups(object):
"""
List of bridge groups
.. attribute:: bridge_domain_group
Bridge group
**type**\: list of :py:class:`BridgeDomainGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Keyed child list of bridge groups; YList records its owner and YANG name.
    self.parent = None
    self.bridge_domain_group = YList()
    self.bridge_domain_group.parent = self
    self.bridge_domain_group.name = 'bridge_domain_group'
class BridgeDomainGroup(object):
"""
Bridge group
.. attribute:: name <key>
Name of the Bridge group
**type**\: str
**length:** 0..32
.. attribute:: bridge_domains
List of Bridge Domain
**type**\: :py:class:`BridgeDomains <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Key 'name' starts unset; the bridge-domains child container is
    # instantiated eagerly with its parent pointer wired back to this entry.
    self.parent = None
    self.name = None
    self.bridge_domains = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains()
    self.bridge_domains.parent = self
class BridgeDomains(object):
"""
List of Bridge Domain
.. attribute:: bridge_domain
bridge domain
**type**\: list of :py:class:`BridgeDomain <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Keyed child list of bridge domains; YList records its owner and YANG name.
    self.parent = None
    self.bridge_domain = YList()
    self.bridge_domain.parent = self
    self.bridge_domain.name = 'bridge_domain'
class BridgeDomain(object):
"""
bridge domain
.. attribute:: name <key>
Name of the bridge domain
**type**\: str
**length:** 0..27
.. attribute:: bd_attachment_circuits
Attachment Circuit table
**type**\: :py:class:`BdAttachmentCircuits <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits>`
.. attribute:: bd_pseudowire_evpns
List of EVPN pseudowires
**type**\: :py:class:`BdPseudowireEvpns <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns>`
.. attribute:: bd_pseudowires
List of pseudowires
**type**\: :py:class:`BdPseudowires <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires>`
.. attribute:: bd_storm_controls
Storm Control
**type**\: :py:class:`BdStormControls <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls>`
.. attribute:: bridge_domain_evis
Bridge Domain EVI Table
**type**\: :py:class:`BridgeDomainEvis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis>`
.. attribute:: bridge_domain_mac
MAC configuration commands
**type**\: :py:class:`BridgeDomainMac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac>`
.. attribute:: bridge_domain_mtu
Maximum transmission unit for this Bridge Domain
**type**\: int
**range:** 46..65535
**units**\: byte
.. attribute:: bridge_domain_pbb
Bridge Domain PBB
**type**\: :py:class:`BridgeDomainPbb <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb>`
.. attribute:: coupled_mode
Coupled\-mode configuration
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: dai
Dynamic ARP Inspection
**type**\: :py:class:`Dai <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai>`
.. attribute:: dhcp
DHCPv4 Snooping profile name
**type**\: str
**length:** 0..32
.. attribute:: flooding
Disable flooding
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: flooding_unknown_unicast
Disable Unknown Unicast flooding
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: igmp_snooping
Attach IGMP Snooping Profile Name
**type**\: str
**length:** 0..32
.. attribute:: igmp_snooping_disable
Disable IGMP Snooping
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: ip_source_guard
IP Source Guard
**type**\: :py:class:`IpSourceGuard <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard>`
.. attribute:: member_vnis
Bridge Domain VxLAN Network Identifier Table
**type**\: :py:class:`MemberVnis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis>`
.. attribute:: mld_snooping
Attach MLD Snooping Profile Name
**type**\: str
**length:** 0..32
.. attribute:: nv_satellite
nV Satellite
**type**\: :py:class:`NvSatellite <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite>`
.. attribute:: routed_interfaces
Bridge Domain Routed Interface Table
**type**\: :py:class:`RoutedInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces>`
.. attribute:: shutdown
shutdown the Bridge Domain
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: transport_mode
Bridge Domain Transport mode
**type**\: :py:class:`BridgeDomainTransportModeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BridgeDomainTransportModeEnum>`
.. attribute:: vfis
Specify the virtual forwarding interface name
**type**\: :py:class:`Vfis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Instantiate every child container eagerly and wire its parent pointer
    # back to this bridge-domain entry; scalar leaves start unset (None).
    self.parent = None
    self.name = None
    self.bd_attachment_circuits = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits()
    self.bd_attachment_circuits.parent = self
    self.bd_pseudowire_evpns = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns()
    self.bd_pseudowire_evpns.parent = self
    self.bd_pseudowires = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires()
    self.bd_pseudowires.parent = self
    self.bd_storm_controls = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls()
    self.bd_storm_controls.parent = self
    self.bridge_domain_evis = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis()
    self.bridge_domain_evis.parent = self
    self.bridge_domain_mac = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac()
    self.bridge_domain_mac.parent = self
    self.bridge_domain_mtu = None
    self.bridge_domain_pbb = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb()
    self.bridge_domain_pbb.parent = self
    self.coupled_mode = None
    self.dai = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai()
    self.dai.parent = self
    self.dhcp = None
    self.flooding = None
    self.flooding_unknown_unicast = None
    self.igmp_snooping = None
    self.igmp_snooping_disable = None
    self.ip_source_guard = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard()
    self.ip_source_guard.parent = self
    self.member_vnis = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis()
    self.member_vnis.parent = self
    self.mld_snooping = None
    self.nv_satellite = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite()
    self.nv_satellite.parent = self
    self.routed_interfaces = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces()
    self.routed_interfaces.parent = self
    self.shutdown = None
    self.transport_mode = None
    self.vfis = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis()
    self.vfis.parent = self
class BdStormControls(object):
    """
    Storm Control

    .. attribute:: bd_storm_control
        Storm Control Type
        **type**\: list of :py:class:`BdStormControl <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl>`
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Keyed child list; YList records its owner and YANG node name.
        self.parent = None
        self.bd_storm_control = YList()
        self.bd_storm_control.parent = self
        self.bd_storm_control.name = 'bd_storm_control'

    class BdStormControl(object):
        """
        Storm Control Type

        .. attribute:: sctype <key>
            Storm Control Type
            **type**\: :py:class:`StormControlEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.StormControlEnum>`

        .. attribute:: storm_control_unit
            Specify units for Storm Control Configuration
            **type**\: :py:class:`StormControlUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.sctype = None
            self.storm_control_unit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit()
            self.storm_control_unit.parent = self

        class StormControlUnit(object):
            """
            Specify units for Storm Control Configuration

            .. attribute:: kbits_per_sec
                Kilobits Per Second, PktsPerSec and KbitsPerSec cannot be configured together
                **type**\: int
                **range:** 64..1280000
                **units**\: kbit/s

            .. attribute:: pkts_per_sec
                Packets Per Second, PktsPerSec and KbitsPerSec cannot be configured together
                **type**\: int
                **range:** 1..160000
                **units**\: packet/s
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.kbits_per_sec = None
                self.pkts_per_sec = None

            @property
            def _common_path(self):
                # Container path appended to the owning storm-control entry's path.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:storm-control-unit'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # True when either unit leaf has been assigned.
                if not self.is_config():
                    return False
                if self.kbits_per_sec is not None:
                    return True
                if self.pkts_per_sec is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                # Imported lazily to avoid a circular import with the generated meta module.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit']['meta_info']

        @property
        def _common_path(self):
            # XPath of this storm-control entry; key 'sctype' must be set.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.sctype is None:
                raise YPYModelError('Key property sctype is None')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bd-storm-control[Cisco-IOS-XR-l2vpn-cfg:sctype = ' + str(self.sctype) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when the key is set or the unit container carries data.
            if not self.is_config():
                return False
            if self.sctype is not None:
                return True
            if self.storm_control_unit is not None and self.storm_control_unit._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            # Imported lazily to avoid a circular import with the generated meta module.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info']

    @property
    def _common_path(self):
        # Container path appended to the owning bridge-domain entry's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bd-storm-controls'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # The container has data when any child entry does.
        if not self.is_config():
            return False
        if self.bd_storm_control is not None:
            for child_ref in self.bd_storm_control:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls']['meta_info']
class MemberVnis(object):
"""
Bridge Domain VxLAN Network Identifier
Table
.. attribute:: member_vni
Bridge Domain Member VxLAN Network Identifier
**type**\: list of :py:class:`MemberVni <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Keyed child list of member VNIs; YList records its owner and YANG name.
    self.parent = None
    self.member_vni = YList()
    self.member_vni.parent = self
    self.member_vni.name = 'member_vni'
class MemberVni(object):
"""
Bridge Domain Member VxLAN Network
Identifier
.. attribute:: vni <key>
VxLAN Network Identifier number
**type**\: int
**range:** 1..16777215
.. attribute:: member_vni_static_mac_addresses
Static Mac Address Table
**type**\: :py:class:`MemberVniStaticMacAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Key 'vni' starts unset; the static-MAC table child container is
    # instantiated eagerly with its parent pointer wired back to this entry.
    self.parent = None
    self.vni = None
    self.member_vni_static_mac_addresses = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses()
    self.member_vni_static_mac_addresses.parent = self
class MemberVniStaticMacAddresses(object):
    """
    Static MAC address table under a bridge-domain member VNI.

    .. attribute:: member_vni_static_mac_address
        Static Mac Address Configuration
        **type**\: list of :py:class:`MemberVniStaticMacAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress>`
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Keyed child list; YList tracks its owner and YANG node name.
        self.parent = None
        self.member_vni_static_mac_address = YList()
        self.member_vni_static_mac_address.parent = self
        self.member_vni_static_mac_address.name = 'member_vni_static_mac_address'

    class MemberVniStaticMacAddress(object):
        """
        One static MAC entry, keyed by MAC address.

        .. attribute:: mac_address <key>
            Static MAC address
            **type**\: str

        .. attribute:: next_hop_ip
            Enable Static Mac Address Configuration (IPv4 next hop)
            **type**\: str
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            # Key and next-hop leaf start unset; parent is wired by the table.
            self.parent = None
            self.mac_address = None
            self.next_hop_ip = None

        @property
        def _common_path(self):
            # Entry path = parent path + list node name + key predicate.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.mac_address is None:
                raise YPYModelError('Key property mac_address is None')
            predicate = '[Cisco-IOS-XR-l2vpn-cfg:mac-address = ' + str(self.mac_address) + ']'
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:member-vni-static-mac-address' + predicate

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # Populated if the key or the next-hop leaf has been assigned.
            if not self.is_config():
                return False
            return self.mac_address is not None or self.next_hop_ip is not None

        @staticmethod
        def _meta_info():
            # Imported lazily to dodge the circular import with the meta tables.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress']['meta_info']

    @property
    def _common_path(self):
        # Container path appended to the owning member-vni entry's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:member-vni-static-mac-addresses'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # The table has data when any of its entries does.
        if not self.is_config():
            return False
        entries = self.member_vni_static_mac_address
        if entries is not None and any(entry._has_data() for entry in entries):
            return True
        return False

    @staticmethod
    def _meta_info():
        # Imported lazily to dodge the circular import with the meta tables.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info']
@property
def _common_path(self):
    """XPath-like path of this member-vni list entry, keyed by ``vni``."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.vni is None:
        raise YPYModelError('Key property vni is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:member-vni[Cisco-IOS-XR-l2vpn-cfg:vni = ' + str(self.vni) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # This binding is generated from a -cfg YANG model, so always True.
    return True
def _has_data(self):
    """Return True if the key leaf or the static-MAC container holds data."""
    if not self.is_config():
        return False
    if self.vni is not None:
        return True
    if self.member_vni_static_mac_addresses is not None and self.member_vni_static_mac_addresses._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    """Return the generated meta information for the MemberVni entry."""
    # Import is deferred to avoid loading the large meta module at import time.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info']
@property
def _common_path(self):
    """XPath-like path of the member-vnis container under its parent."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:member-vnis'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # This binding is generated from a -cfg YANG model, so always True.
    return True
def _has_data(self):
    """Return True if any child member-vni list entry holds data."""
    if not self.is_config():
        return False
    if self.member_vni is not None:
        for child_ref in self.member_vni:
            if child_ref._has_data():
                return True
    return False
@staticmethod
def _meta_info():
    """Return the generated meta information for the MemberVnis container."""
    # Import is deferred to avoid loading the large meta module at import time.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis']['meta_info']
class BridgeDomainMac(object):
    """MAC configuration commands for a bridge domain.

    Groups MAC aging, filtering, learning, limit, secure and
    withdraw-related settings of the enclosing bridge domain.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child containers are created eagerly and parented so that their
        # _common_path can be derived immediately.
        self.bd_mac_aging = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging()
        self.bd_mac_aging.parent = self
        self.bd_mac_filters = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters()
        self.bd_mac_filters.parent = self
        self.bd_mac_learn = None
        self.bd_mac_limit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit()
        self.bd_mac_limit.parent = self
        self.bd_mac_port_down_flush = None
        self.bd_mac_withdraw = None
        self.bd_mac_withdraw_access_pw_disable = None
        self.bd_mac_withdraw_behavior = None
        self.bd_mac_withdraw_relay = None
        self.mac_secure = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure()
        self.mac_secure.parent = self

    class BdMacLimit(object):
        """MAC-Limit settings: enforcement action, max count, notification."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.bd_mac_limit_action = None
            self.bd_mac_limit_max = None
            self.bd_mac_limit_notif = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '{0}/Cisco-IOS-XR-l2vpn-cfg:bd-mac-limit'.format(self.parent._common_path)

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            leaves = (
                self.bd_mac_limit_action,
                self.bd_mac_limit_max,
                self.bd_mac_limit_notif,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit']['meta_info']

    class BdMacFilters(object):
        """Filter MAC address table: list of per-address filter entries."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.bd_mac_filter = YList()
            self.bd_mac_filter.parent = self
            self.bd_mac_filter.name = 'bd_mac_filter'

        class BdMacFilter(object):
            """One filtered MAC entry, keyed by ``address``; ``drop`` marks it."""

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.address = None
                self.drop = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.address is None:
                    raise YPYModelError('Key property address is None')
                return '{0}/Cisco-IOS-XR-l2vpn-cfg:bd-mac-filter[Cisco-IOS-XR-l2vpn-cfg:address = {1}]'.format(self.parent._common_path, self.address)

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return any(leaf is not None for leaf in (self.address, self.drop))

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '{0}/Cisco-IOS-XR-l2vpn-cfg:bd-mac-filters'.format(self.parent._common_path)

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            filters = self.bd_mac_filter
            return filters is not None and any(f._has_data() for f in filters)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info']

    class MacSecure(object):
        """MAC Secure settings: enforcement action, enable flag, logging."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.action = None
            self.enable = None
            self.logging = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '{0}/Cisco-IOS-XR-l2vpn-cfg:mac-secure'.format(self.parent._common_path)

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in (self.action, self.enable, self.logging))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure']['meta_info']

    class BdMacAging(object):
        """MAC-Aging settings: aging time (300..30000) and aging type."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.bd_mac_aging_time = None
            self.bd_mac_aging_type = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '{0}/Cisco-IOS-XR-l2vpn-cfg:bd-mac-aging'.format(self.parent._common_path)

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in (self.bd_mac_aging_time, self.bd_mac_aging_type))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '{0}/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-mac'.format(self.parent._common_path)

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Child containers report data through their own _has_data().
        containers = (self.bd_mac_aging, self.bd_mac_filters,
                      self.bd_mac_limit, self.mac_secure)
        if any(c is not None and c._has_data() for c in containers):
            return True
        leaves = (
            self.bd_mac_learn,
            self.bd_mac_port_down_flush,
            self.bd_mac_withdraw,
            self.bd_mac_withdraw_access_pw_disable,
            self.bd_mac_withdraw_behavior,
            self.bd_mac_withdraw_relay,
        )
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
class NvSatellite(object):
    """nV Satellite settings: enable flag and IPv4 multicast offload flag."""

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.enable = None
        self.offload_ipv4_multicast_enable = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '{0}/Cisco-IOS-XR-l2vpn-cfg:nv-satellite'.format(self.parent._common_path)

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return any(leaf is not None for leaf in (self.enable, self.offload_ipv4_multicast_enable))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite']['meta_info']
class BridgeDomainPbb(object):
"""
Bridge Domain PBB
.. attribute:: pbb_core
PBB Core
**type**\: :py:class:`PbbCore <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore>`
.. attribute:: pbb_edges
PBB Edge
**type**\: :py:class:`PbbEdges <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Create the PBB core and edge child containers, parented to self."""
    self.parent = None  # set by the enclosing BridgeDomain
    self.pbb_core = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore()
    self.pbb_core.parent = self
    self.pbb_edges = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges()
    self.pbb_edges.parent = self
class PbbEdges(object):
    """PBB Edge table: list of per-(core BD name, ISID) edge configurations."""

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.pbb_edge = YList()
        self.pbb_edge.parent = self
        self.pbb_edge.name = 'pbb_edge'

    class PbbEdge(object):
        """Bridge domain configured as PBB Edge.

        Keyed by ``core_bd_name`` (length 0..27) and ``isid``
        (range 256..16777214); carries DHCP/IGMP profile attachments,
        MAC settings, static MAC mappings and the unknown-unicast BMAC.
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.core_bd_name = None
            self.isid = None
            # Child containers are created eagerly and parented so that their
            # _common_path can be derived immediately.
            self.pbb_edge_dhcp_profile = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile()
            self.pbb_edge_dhcp_profile.parent = self
            self.pbb_edge_igmp_profile = None
            self.pbb_edge_mac = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac()
            self.pbb_edge_mac.parent = self
            self.pbb_static_mac_mappings = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings()
            self.pbb_static_mac_mappings.parent = self
            self.unknown_unicast_bmac = None

        class PbbStaticMacMappings(object):
            """PBB static MAC address mapping table."""

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.pbb_static_mac_mapping = YList()
                self.pbb_static_mac_mapping.parent = self
                self.pbb_static_mac_mapping.name = 'pbb_static_mac_mapping'

            class PbbStaticMacMapping(object):
                """Maps a static MAC (``address``, key) to a backbone MAC (``bmac``)."""

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.address = None
                    self.bmac = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.address is None:
                        raise YPYModelError('Key property address is None')
                    return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-static-mac-mapping[Cisco-IOS-XR-l2vpn-cfg:address = {1}]'.format(self.parent._common_path, self.address)

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    return any(leaf is not None for leaf in (self.address, self.bmac))

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-static-mac-mappings'.format(self.parent._common_path)

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                mappings = self.pbb_static_mac_mapping
                return mappings is not None and any(m._has_data() for m in mappings)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info']

        class PbbEdgeDhcpProfile(object):
            """DHCP profile attachment: snooping id and profile selection."""

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.dhcp_snooping_id = None
                self.profile_id = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edge-dhcp-profile'.format(self.parent._common_path)

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return any(leaf is not None for leaf in (self.dhcp_snooping_id, self.profile_id))

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile']['meta_info']

        class PbbEdgeMac(object):
            """MAC configuration for a PBB edge: aging, learning, limit, secure."""

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.pbb_edge_mac_aging = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging()
                self.pbb_edge_mac_aging.parent = self
                self.pbb_edge_mac_learning = None
                self.pbb_edge_mac_limit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit()
                self.pbb_edge_mac_limit.parent = self
                self.pbb_edge_mac_secure = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure()
                self.pbb_edge_mac_secure.parent = self

            class PbbEdgeMacLimit(object):
                """MAC-Limit settings: enforcement action, max count, notification."""

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.pbb_edge_mac_limit_action = None
                    self.pbb_edge_mac_limit_max = None
                    self.pbb_edge_mac_limit_notif = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edge-mac-limit'.format(self.parent._common_path)

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    leaves = (
                        self.pbb_edge_mac_limit_action,
                        self.pbb_edge_mac_limit_max,
                        self.pbb_edge_mac_limit_notif,
                    )
                    return any(leaf is not None for leaf in leaves)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit']['meta_info']

            class PbbEdgeMacAging(object):
                """MAC-Aging settings: aging time (300..30000) and aging type."""

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.pbb_edge_mac_aging_time = None
                    self.pbb_edge_mac_aging_type = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edge-mac-aging'.format(self.parent._common_path)

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    return any(leaf is not None for leaf in (self.pbb_edge_mac_aging_time, self.pbb_edge_mac_aging_type))

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging']['meta_info']

            class PbbEdgeMacSecure(object):
                """MAC Secure settings for the virtual instance port."""

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.accept_shutdown = None
                    self.action = None
                    self.disable = None
                    self.enable = None
                    self.logging = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edge-mac-secure'.format(self.parent._common_path)

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    leaves = (
                        self.accept_shutdown,
                        self.action,
                        self.disable,
                        self.enable,
                        self.logging,
                    )
                    return any(leaf is not None for leaf in leaves)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edge-mac'.format(self.parent._common_path)

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                containers = (self.pbb_edge_mac_aging, self.pbb_edge_mac_limit, self.pbb_edge_mac_secure)
                if any(c is not None and c._has_data() for c in containers):
                    return True
                return self.pbb_edge_mac_learning is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']

        @property
        def _common_path(self):
            # Both key leaves are required to build the list-entry predicate.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.core_bd_name is None:
                raise YPYModelError('Key property core_bd_name is None')
            if self.isid is None:
                raise YPYModelError('Key property isid is None')
            return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edge[Cisco-IOS-XR-l2vpn-cfg:core-bd-name = {1}][Cisco-IOS-XR-l2vpn-cfg:isid = {2}]'.format(self.parent._common_path, self.core_bd_name, self.isid)

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.core_bd_name is not None or self.isid is not None:
                return True
            containers = (self.pbb_edge_dhcp_profile, self.pbb_edge_mac, self.pbb_static_mac_mappings)
            if any(c is not None and c._has_data() for c in containers):
                return True
            return self.pbb_edge_igmp_profile is not None or self.unknown_unicast_bmac is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '{0}/Cisco-IOS-XR-l2vpn-cfg:pbb-edges'.format(self.parent._common_path)

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        edges = self.pbb_edge
        return edges is not None and any(edge._has_data() for edge in edges)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info']
class PbbCore(object):
    """
    PBB Core
    .. attribute:: enable
        Enable Bridge Domain PBB Core Configuration
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: pbb_core_dhcp_profile
        Attach a DHCP profile
        **type**\: :py:class:`PbbCoreDhcpProfile <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile>`
    .. attribute:: pbb_core_evis
        PBB Core EVI Table
        **type**\: :py:class:`PbbCoreEvis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis>`
    .. attribute:: pbb_core_igmp_profile
        Attach a IGMP Snooping profile
        **type**\: str
        **length:** 0..32
    .. attribute:: pbb_core_mac
        MAC configuration commands
        **type**\: :py:class:`PbbCoreMac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac>`
    .. attribute:: pbb_core_mmrp_flood_optimization
        Enabling MMRP PBB\-VPLS Flood Optimization
        **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: vlan_id
        VLAN ID to push
        **type**\: int
        **range:** 1..4094
    """

    # Auto-generated YDK binding: the YDK runtime discovers these classes by
    # reflection and looks them up in the meta table by their exact dotted
    # class-path string, so names and paths here must not be altered.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # parent is assigned by the enclosing BridgeDomainPbb container.
        self.parent = None
        self.enable = None
        self.pbb_core_dhcp_profile = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile()
        self.pbb_core_dhcp_profile.parent = self
        self.pbb_core_evis = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis()
        self.pbb_core_evis.parent = self
        self.pbb_core_igmp_profile = None
        self.pbb_core_mac = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac()
        self.pbb_core_mac.parent = self
        self.pbb_core_mmrp_flood_optimization = None
        self.vlan_id = None

    class PbbCoreMac(object):
        """
        MAC configuration commands
        .. attribute:: pbb_core_mac_aging
            MAC\-Aging configuration commands
            **type**\: :py:class:`PbbCoreMacAging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging>`
        .. attribute:: pbb_core_mac_learning
            Enable Mac Learning
            **type**\: :py:class:`MacLearnEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.MacLearnEnum>`
        .. attribute:: pbb_core_mac_limit
            MAC\-Limit configuration commands
            **type**\: :py:class:`PbbCoreMacLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit>`
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.pbb_core_mac_aging = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging()
            self.pbb_core_mac_aging.parent = self
            self.pbb_core_mac_learning = None
            self.pbb_core_mac_limit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit()
            self.pbb_core_mac_limit.parent = self

        class PbbCoreMacAging(object):
            """
            MAC\-Aging configuration commands
            .. attribute:: pbb_core_mac_aging_time
                Mac Aging Time
                **type**\: int
                **range:** 300..30000
            .. attribute:: pbb_core_mac_aging_type
                MAC address aging type
                **type**\: :py:class:`MacAgingEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.MacAgingEnum>`
            """
            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.pbb_core_mac_aging_time = None
                self.pbb_core_mac_aging_type = None

            @property
            def _common_path(self):
                # XPath of this node is derived from the parent container's path.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core-mac-aging'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # Node contributes data once any of its leaves has been set.
                if not self.is_config():
                    return False
                if self.pbb_core_mac_aging_time is not None:
                    return True
                if self.pbb_core_mac_aging_type is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging']['meta_info']

        class PbbCoreMacLimit(object):
            """
            MAC\-Limit configuration commands
            .. attribute:: pbb_core_mac_limit_action
                MAC address limit enforcement action
                **type**\: :py:class:`MacLimitActionEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.MacLimitActionEnum>`
            .. attribute:: pbb_core_mac_limit_max
                Number of MAC addresses after which MAC limit action is taken
                **type**\: int
                **range:** 0..4294967295
            .. attribute:: pbb_core_mac_limit_notif
                MAC address limit notification action
                **type**\: :py:class:`MacNotificationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.MacNotificationEnum>`
            """
            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.pbb_core_mac_limit_action = None
                self.pbb_core_mac_limit_max = None
                self.pbb_core_mac_limit_notif = None

            @property
            def _common_path(self):
                # XPath of this node is derived from the parent container's path.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core-mac-limit'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # Node contributes data once any of its leaves has been set.
                if not self.is_config():
                    return False
                if self.pbb_core_mac_limit_action is not None:
                    return True
                if self.pbb_core_mac_limit_max is not None:
                    return True
                if self.pbb_core_mac_limit_notif is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit']['meta_info']

        @property
        def _common_path(self):
            # XPath of this node is derived from the parent container's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core-mac'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # A leaf counts when set; child containers count when they have data.
            if not self.is_config():
                return False
            if self.pbb_core_mac_aging is not None and self.pbb_core_mac_aging._has_data():
                return True
            if self.pbb_core_mac_learning is not None:
                return True
            if self.pbb_core_mac_limit is not None and self.pbb_core_mac_limit._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']

    class PbbCoreEvis(object):
        """
        PBB Core EVI Table
        .. attribute:: pbb_core_evi
            PBB Core EVI
            **type**\: list of :py:class:`PbbCoreEvi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi>`
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # YList keeps entries parent-linked and named for encoding.
            self.pbb_core_evi = YList()
            self.pbb_core_evi.parent = self
            self.pbb_core_evi.name = 'pbb_core_evi'

        class PbbCoreEvi(object):
            """
            PBB Core EVI
            .. attribute:: eviid <key>
                Ethernet VPN ID
                **type**\: int
                **range:** 1..4294967295
            """
            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.eviid = None

            @property
            def _common_path(self):
                # Keyed list entry: path requires both parent and the eviid key.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.eviid is None:
                    raise YPYModelError('Key property eviid is None')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core-evi[Cisco-IOS-XR-l2vpn-cfg:eviid = ' + str(self.eviid) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # The entry carries data once its key has been assigned.
                if not self.is_config():
                    return False
                if self.eviid is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi']['meta_info']

        @property
        def _common_path(self):
            # XPath of this table is derived from the parent container's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core-evis'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # The table carries data when any of its entries does.
            if not self.is_config():
                return False
            if self.pbb_core_evi is not None:
                for child_ref in self.pbb_core_evi:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info']

    class PbbCoreDhcpProfile(object):
        """
        Attach a DHCP profile
        .. attribute:: dhcp_snooping_id
            Disable DHCP snooping
            **type**\: str
        .. attribute:: profile_id
            Set the snooping profile
            **type**\: :py:class:`InterfaceProfileEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterfaceProfileEnum>`
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.dhcp_snooping_id = None
            self.profile_id = None

        @property
        def _common_path(self):
            # XPath of this node is derived from the parent container's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core-dhcp-profile'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # Node contributes data once any of its leaves has been set.
            if not self.is_config():
                return False
            if self.dhcp_snooping_id is not None:
                return True
            if self.profile_id is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile']['meta_info']

    @property
    def _common_path(self):
        # XPath of the pbb-core node is derived from the parent container's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:pbb-core'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # A leaf counts when set; child containers count when they have data.
        if not self.is_config():
            return False
        if self.enable is not None:
            return True
        if self.pbb_core_dhcp_profile is not None and self.pbb_core_dhcp_profile._has_data():
            return True
        if self.pbb_core_evis is not None and self.pbb_core_evis._has_data():
            return True
        if self.pbb_core_igmp_profile is not None:
            return True
        if self.pbb_core_mac is not None and self.pbb_core_mac._has_data():
            return True
        if self.pbb_core_mmrp_flood_optimization is not None:
            return True
        if self.vlan_id is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-pbb'
def is_config(self):
    """Always True: this node models configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.pbb_core is not None and self.pbb_core._has_data():
return True
if self.pbb_edges is not None and self.pbb_edges._has_data():
return True
return False
@staticmethod
def _meta_info():
    """Look up the generated meta-table entry describing BridgeDomainPbb."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    entry = meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']
    return entry['meta_info']
class BridgeDomainEvis(object):
    """Table of Ethernet VPN identifiers configured on a bridge domain.

    The ``bridge_domain_evi`` YList holds one :class:`BridgeDomainEvi`
    entry per configured EVI, each keyed by ``eviid``.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        evi_list = YList()
        evi_list.parent = self
        evi_list.name = 'bridge_domain_evi'
        self.bridge_domain_evi = evi_list

    class BridgeDomainEvi(object):
        """A single bridge-domain EVI entry, keyed by ``eviid`` (1..4294967295)."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.eviid = None

        @property
        def _common_path(self):
            """Keyed XPath of this entry; requires parent and the eviid key."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.eviid is None:
                raise YPYModelError('Key property eviid is None')
            return '%s/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-evi[Cisco-IOS-XR-l2vpn-cfg:eviid = %s]' % (
                self.parent._common_path, self.eviid)

        def is_config(self):
            """Entries in this table model configuration data."""
            return True

        def _has_data(self):
            """True once the eviid key has been assigned."""
            if not self.is_config():
                return False
            return self.eviid is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi']['meta_info']

    @property
    def _common_path(self):
        """XPath of the EVI table, derived from the parent bridge domain."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-evis' % self.parent._common_path

    def is_config(self):
        """This table models configuration data."""
        return True

    def _has_data(self):
        """True when at least one EVI entry carries data."""
        if not self.is_config():
            return False
        if self.bridge_domain_evi is None:
            return False
        return any(entry._has_data() for entry in self.bridge_domain_evi)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis']['meta_info']
class BdPseudowires(object):
"""
List of pseudowires
.. attribute:: bd_pseudowire
Pseudowire configuration
**type**\: list of :py:class:`BdPseudowire <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Create the pseudowire table with an empty, parent-linked YList."""
    self.parent = None
    pw_list = YList()
    pw_list.parent = self
    pw_list.name = 'bd_pseudowire'
    self.bd_pseudowire = pw_list
class BdPseudowire(object):
"""
Pseudowire configuration
.. attribute:: neighbor <key>
Neighbor IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: pseudowire_id <key>
Pseudowire ID
**type**\: int
**range:** 1..4294967295
.. attribute:: bd_pw_class
PW class template name to use for this pseudowire
**type**\: str
**length:** 0..32
.. attribute:: bd_pw_mpls_static_labels
MPLS static labels
**type**\: :py:class:`BdPwMplsStaticLabels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels>`
.. attribute:: bd_pw_split_horizon
Split Horizon
**type**\: :py:class:`BdPwSplitHorizon <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon>`
.. attribute:: bd_pw_static_mac_addresses
Static Mac Address Table
**type**\: :py:class:`BdPwStaticMacAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses>`
.. attribute:: bdpw_storm_control_types
Storm Control
**type**\: :py:class:`BdpwStormControlTypes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes>`
.. attribute:: bridge_domain_backup_pseudowires
List of pseudowires
**type**\: :py:class:`BridgeDomainBackupPseudowires <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires>`
.. attribute:: pseudowire_dai
Access Pseudowire Dynamic ARP Inspection
**type**\: :py:class:`PseudowireDai <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai>`
.. attribute:: pseudowire_flooding
Bridge\-domain Pseudowire flooding
**type**\: :py:class:`InterfaceTrafficFloodEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterfaceTrafficFloodEnum>`
.. attribute:: pseudowire_flooding_unknown_unicast
Bridge\-domain Pseudowire flooding Unknown Unicast
**type**\: :py:class:`InterfaceTrafficFloodEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterfaceTrafficFloodEnum>`
.. attribute:: pseudowire_igmp_snoop
Attach a IGMP Snooping profile
**type**\: str
**length:** 0..32
.. attribute:: pseudowire_ip_source_guard
IP Source Guard
**type**\: :py:class:`PseudowireIpSourceGuard <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard>`
.. attribute:: pseudowire_mac
Bridge\-domain Pseudowire MAC configuration commands
**type**\: :py:class:`PseudowireMac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac>`
.. attribute:: pseudowire_mld_snoop
Attach a MLD Snooping profile
**type**\: str
**length:** 0..32
.. attribute:: pseudowire_profile
Attach a DHCP profile
**type**\: :py:class:`PseudowireProfile <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Initialize keys, leaves and child containers; wire children back to self."""
    # Shorthand for this generated class' fully-qualified location in the model tree.
    _BdPw = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire
    self.parent = None
    self.neighbor = None
    self.pseudowire_id = None
    self.bd_pw_class = None
    self.bd_pw_mpls_static_labels = _BdPw.BdPwMplsStaticLabels()
    self.bd_pw_mpls_static_labels.parent = self
    self.bd_pw_split_horizon = _BdPw.BdPwSplitHorizon()
    self.bd_pw_split_horizon.parent = self
    self.bd_pw_static_mac_addresses = _BdPw.BdPwStaticMacAddresses()
    self.bd_pw_static_mac_addresses.parent = self
    self.bdpw_storm_control_types = _BdPw.BdpwStormControlTypes()
    self.bdpw_storm_control_types.parent = self
    self.bridge_domain_backup_pseudowires = _BdPw.BridgeDomainBackupPseudowires()
    self.bridge_domain_backup_pseudowires.parent = self
    self.pseudowire_dai = _BdPw.PseudowireDai()
    self.pseudowire_dai.parent = self
    self.pseudowire_flooding = None
    self.pseudowire_flooding_unknown_unicast = None
    self.pseudowire_igmp_snoop = None
    self.pseudowire_ip_source_guard = _BdPw.PseudowireIpSourceGuard()
    self.pseudowire_ip_source_guard.parent = self
    self.pseudowire_mac = _BdPw.PseudowireMac()
    self.pseudowire_mac.parent = self
    self.pseudowire_mld_snoop = None
    self.pseudowire_profile = _BdPw.PseudowireProfile()
    self.pseudowire_profile.parent = self
class PseudowireDai(object):
    """Dynamic ARP Inspection (DAI) settings for an access pseudowire.

    Leaves: ``disable`` / ``enable`` (Empty presence leaves) and ``logging``
    (L2VpnLoggingEnum).  Per-field address-validation switches live in the
    nested :class:`PseudowireDaiAddressValidation` container.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.disable = None
        self.enable = None
        self.logging = None
        validation = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation()
        validation.parent = self
        self.pseudowire_dai_address_validation = validation

    class PseudowireDaiAddressValidation(object):
        """Address-validation switches (each an L2VpnVerificationEnum):
        destination MAC, IPv4 and source MAC verification.
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.destination_mac_verification = None
            self.ipv4_verification = None
            self.source_mac_verification = None

        @property
        def _common_path(self):
            """XPath of this container, derived from the parent DAI node."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '%s/Cisco-IOS-XR-l2vpn-cfg:pseudowire-dai-address-validation' % self.parent._common_path

        def is_config(self):
            """This container models configuration data."""
            return True

        def _has_data(self):
            """True when any verification leaf has been set."""
            if not self.is_config():
                return False
            return any(
                leaf is not None
                for leaf in (self.destination_mac_verification,
                             self.ipv4_verification,
                             self.source_mac_verification)
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation']['meta_info']

    @property
    def _common_path(self):
        """XPath of the DAI node, derived from the parent pseudowire."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-l2vpn-cfg:pseudowire-dai' % self.parent._common_path

    def is_config(self):
        """This node models configuration data."""
        return True

    def _has_data(self):
        """True when any leaf is set or the validation container has data."""
        if not self.is_config():
            return False
        if any(leaf is not None for leaf in (self.disable, self.enable, self.logging)):
            return True
        validation = self.pseudowire_dai_address_validation
        return validation is not None and validation._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info']
class BdpwStormControlTypes(object):
    """Storm-control table for a bridge-domain pseudowire; one
    :class:`BdpwStormControlType` entry per storm-control type.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        sc_list = YList()
        sc_list.parent = self
        sc_list.name = 'bdpw_storm_control_type'
        self.bdpw_storm_control_type = sc_list

    class BdpwStormControlType(object):
        """One storm-control entry, keyed by ``sctype`` (StormControlEnum),
        carrying its rate in the nested :class:`StormControlUnit`.
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.sctype = None
            unit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit()
            unit.parent = self
            self.storm_control_unit = unit

        class StormControlUnit(object):
            """Storm-control rate: ``kbits_per_sec`` (64..1280000, kbit/s) or
            ``pkts_per_sec`` (1..160000, packet/s); per the model the two
            cannot be configured together.
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.kbits_per_sec = None
                self.pkts_per_sec = None

            @property
            def _common_path(self):
                """XPath of the rate container under its storm-control entry."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '%s/Cisco-IOS-XR-l2vpn-cfg:storm-control-unit' % self.parent._common_path

            def is_config(self):
                """This container models configuration data."""
                return True

            def _has_data(self):
                """True when either rate leaf has been set."""
                if not self.is_config():
                    return False
                return self.kbits_per_sec is not None or self.pkts_per_sec is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit']['meta_info']

        @property
        def _common_path(self):
            """Keyed XPath of this entry; requires parent and the sctype key."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.sctype is None:
                raise YPYModelError('Key property sctype is None')
            return '%s/Cisco-IOS-XR-l2vpn-cfg:bdpw-storm-control-type[Cisco-IOS-XR-l2vpn-cfg:sctype = %s]' % (
                self.parent._common_path, self.sctype)

        def is_config(self):
            """Entries in this table model configuration data."""
            return True

        def _has_data(self):
            """True when the key is set or the rate container carries data."""
            if not self.is_config():
                return False
            if self.sctype is not None:
                return True
            unit = self.storm_control_unit
            return unit is not None and unit._has_data()

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info']

    @property
    def _common_path(self):
        """XPath of the storm-control table under the parent pseudowire."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-l2vpn-cfg:bdpw-storm-control-types' % self.parent._common_path

    def is_config(self):
        """This table models configuration data."""
        return True

    def _has_data(self):
        """True when at least one storm-control entry carries data."""
        if not self.is_config():
            return False
        if self.bdpw_storm_control_type is None:
            return False
        return any(entry._has_data() for entry in self.bdpw_storm_control_type)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info']
class PseudowireProfile(object):
    """DHCP snooping profile attachment for an access pseudowire.

    Leaves: ``dhcp_snooping_id`` (str, disables DHCP snooping) and
    ``profile_id`` (InterfaceProfileEnum, selects the snooping profile).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.dhcp_snooping_id = None
        self.profile_id = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent pseudowire."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-l2vpn-cfg:pseudowire-profile' % self.parent._common_path

    def is_config(self):
        """This node models configuration data."""
        return True

    def _has_data(self):
        """True when either leaf has been assigned."""
        if not self.is_config():
            return False
        return any(
            leaf is not None
            for leaf in (self.dhcp_snooping_id, self.profile_id)
        )

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile']['meta_info']
class BdPwStaticMacAddresses(object):
    """Static MAC address table for a bridge-domain pseudowire; one
    :class:`BdPwStaticMacAddress` entry per configured address.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        mac_list = YList()
        mac_list.parent = self
        mac_list.name = 'bd_pw_static_mac_address'
        self.bd_pw_static_mac_address = mac_list

    class BdPwStaticMacAddress(object):
        """One static MAC entry, keyed by ``address``
        (``aa:bb:cc:dd:ee:ff``-style string).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.address = None

        @property
        def _common_path(self):
            """Keyed XPath of this entry; requires parent and the address key."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.address is None:
                raise YPYModelError('Key property address is None')
            return '%s/Cisco-IOS-XR-l2vpn-cfg:bd-pw-static-mac-address[Cisco-IOS-XR-l2vpn-cfg:address = %s]' % (
                self.parent._common_path, self.address)

        def is_config(self):
            """Entries in this table model configuration data."""
            return True

        def _has_data(self):
            """True once the address key has been assigned."""
            if not self.is_config():
                return False
            return self.address is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress']['meta_info']

    @property
    def _common_path(self):
        """XPath of the static-MAC table under the parent pseudowire."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-l2vpn-cfg:bd-pw-static-mac-addresses' % self.parent._common_path

    def is_config(self):
        """This table models configuration data."""
        return True

    def _has_data(self):
        """True when at least one static MAC entry carries data."""
        if not self.is_config():
            return False
        if self.bd_pw_static_mac_address is None:
            return False
        return any(entry._has_data() for entry in self.bd_pw_static_mac_address)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info']
class PseudowireIpSourceGuard(object):
    """
    IP Source Guard settings for a bridge-domain pseudowire.

    .. attribute:: disable
        Disable Dynamic IP source guard (Empty leaf)
    .. attribute:: enable
        Enable IP Source Guard (Empty leaf)
    .. attribute:: logging
        Logging Type (L2VpnLoggingEnum)
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.disable = None
        self.enable = None
        self.logging = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-ip-source-guard'

    def is_config(self):
        """Return True: this entity represents configuration data, not operational state."""
        return True

    def _has_data(self):
        # Data is present as soon as any leaf has been set.
        if not self.is_config():
            return False
        return any(
            leaf is not None
            for leaf in (self.disable, self.enable, self.logging)
        )

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard']['meta_info']
class PseudowireMac(object):
    """
    Bridge-domain pseudowire MAC configuration commands.

    .. attribute:: enable
        Bridge-domain Pseudowire MAC configuration mode (Empty leaf)
    .. attribute:: pseudowire_mac_aging
        MAC-Aging configuration commands (PseudowireMacAging container)
    .. attribute:: pseudowire_mac_learning
        Enable MAC Learning (MacLearnEnum)
    .. attribute:: pseudowire_mac_limit
        MAC-Limit configuration commands (PseudowireMacLimit container)
    .. attribute:: pseudowire_mac_port_down_flush
        Enable/Disable MAC Flush When Port goes down (PortDownFlushEnum)
    .. attribute:: pseudowire_mac_secure
        MAC Secure (PseudowireMacSecure container)
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Leaf nodes start unset.
        self.enable = None
        self.pseudowire_mac_learning = None
        self.pseudowire_mac_port_down_flush = None
        # Child containers are always instantiated and re-parented here.
        aging = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging()
        aging.parent = self
        self.pseudowire_mac_aging = aging
        limit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit()
        limit.parent = self
        self.pseudowire_mac_limit = limit
        secure = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure()
        secure.parent = self
        self.pseudowire_mac_secure = secure

    class PseudowireMacSecure(object):
        """
        MAC Secure.

        Leafs: action (MacSecureActionEnum), disable (Empty),
        enable (Empty), logging (L2VpnLoggingEnum).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.action = None
            self.disable = None
            self.enable = None
            self.logging = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-mac-secure'

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(
                leaf is not None
                for leaf in (self.action, self.disable, self.enable, self.logging)
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure']['meta_info']

    class PseudowireMacAging(object):
        """
        MAC-Aging configuration commands.

        Leafs: pseudowire_mac_aging_time (int, range 300..30000),
        pseudowire_mac_aging_type (MacAgingEnum).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.pseudowire_mac_aging_time = None
            self.pseudowire_mac_aging_type = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-mac-aging'

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(
                leaf is not None
                for leaf in (self.pseudowire_mac_aging_time, self.pseudowire_mac_aging_type)
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging']['meta_info']

    class PseudowireMacLimit(object):
        """
        MAC-Limit configuration commands.

        Leafs: pseudowire_mac_limit_action (MacLimitActionEnum),
        pseudowire_mac_limit_max (int, range 0..4294967295),
        pseudowire_mac_limit_notif (MacNotificationEnum).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.pseudowire_mac_limit_action = None
            self.pseudowire_mac_limit_max = None
            self.pseudowire_mac_limit_notif = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-mac-limit'

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(
                leaf is not None
                for leaf in (
                    self.pseudowire_mac_limit_action,
                    self.pseudowire_mac_limit_max,
                    self.pseudowire_mac_limit_notif,
                )
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-mac'

    def is_config(self):
        """Return True: this entity represents configuration data, not operational state."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Any set leaf counts as data...
        if any(leaf is not None for leaf in (
                self.enable,
                self.pseudowire_mac_learning,
                self.pseudowire_mac_port_down_flush)):
            return True
        # ...as does any populated child container.
        return any(
            child is not None and child._has_data()
            for child in (
                self.pseudowire_mac_aging,
                self.pseudowire_mac_limit,
                self.pseudowire_mac_secure,
            )
        )

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
class BdPwSplitHorizon(object):
    """
    Split Horizon configuration for a bridge-domain pseudowire.

    .. attribute:: bd_pw_split_horizon_group
        Split Horizon Group (BdPwSplitHorizonGroup container)
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated and re-parented here.
        group = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup()
        group.parent = self
        self.bd_pw_split_horizon_group = group

    class BdPwSplitHorizonGroup(object):
        """
        Split Horizon Group.

        .. attribute:: enable
            Enable split horizon group (Empty leaf)
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.enable = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pw-split-horizon-group'

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.enable is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pw-split-horizon'

    def is_config(self):
        """Return True: this entity represents configuration data, not operational state."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        group = self.bd_pw_split_horizon_group
        return group is not None and group._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info']
class BdPwMplsStaticLabels(object):
    """
    MPLS static labels for a bridge-domain pseudowire.

    .. attribute:: local_static_label
        Pseudowire local static label (int, range 16..1048575)
    .. attribute:: remote_static_label
        Pseudowire remote static label (int, range 16..1048575)
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.local_static_label = None
        self.remote_static_label = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pw-mpls-static-labels'

    def is_config(self):
        """Return True: this entity represents configuration data, not operational state."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return any(
            label is not None
            for label in (self.local_static_label, self.remote_static_label)
        )

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels']['meta_info']
class BridgeDomainBackupPseudowires(object):
    """
    List of backup pseudowires.

    .. attribute:: bridge_domain_backup_pseudowire
        Backup pseudowire configuration entries, keyed by (neighbor, pseudowire_id).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YANG list of backup pseudowire entries; the YList is re-parented to us.
        backup_list = YList()
        backup_list.parent = self
        backup_list.name = 'bridge_domain_backup_pseudowire'
        self.bridge_domain_backup_pseudowire = backup_list

    class BridgeDomainBackupPseudowire(object):
        """
        Backup pseudowire configuration.

        .. attribute:: neighbor  <key>
            Neighbor IPv4 address (str)
        .. attribute:: pseudowire_id  <key>
            Pseudowire ID (int, range 1..4294967295)
        .. attribute:: bridge_domain_backup_pw_class
            PW class template name to use for this pseudowire (str, length 0..32)
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.neighbor = None
            self.pseudowire_id = None
            self.bridge_domain_backup_pw_class = None

        @property
        def _common_path(self):
            # Both list keys and a parent anchor are needed to build the XPath.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.neighbor is None:
                raise YPYModelError('Key property neighbor is None')
            if self.pseudowire_id is None:
                raise YPYModelError('Key property pseudowire_id is None')
            keys = '[Cisco-IOS-XR-l2vpn-cfg:neighbor = ' + str(self.neighbor) + ']' + '[Cisco-IOS-XR-l2vpn-cfg:pseudowire-id = ' + str(self.pseudowire_id) + ']'
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-backup-pseudowire' + keys

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(
                leaf is not None
                for leaf in (
                    self.neighbor,
                    self.pseudowire_id,
                    self.bridge_domain_backup_pw_class,
                )
            )

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-backup-pseudowires'

    def is_config(self):
        """Return True: this entity represents configuration data, not operational state."""
        return True

    def _has_data(self):
        # The container has data when any list entry does.
        if not self.is_config():
            return False
        entries = self.bridge_domain_backup_pseudowire
        if entries is not None:
            return any(entry._has_data() for entry in entries)
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info']
@property
def _common_path(self):
    # XPath of this bd-pseudowire list entry; requires a parent anchor and both keys.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.neighbor is None:
        raise YPYModelError('Key property neighbor is None')
    if self.pseudowire_id is None:
        raise YPYModelError('Key property pseudowire_id is None')
    keys = '[Cisco-IOS-XR-l2vpn-cfg:neighbor = ' + str(self.neighbor) + ']' + '[Cisco-IOS-XR-l2vpn-cfg:pseudowire-id = ' + str(self.pseudowire_id) + ']'
    return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pseudowire' + keys
def is_config(self):
    """Return True: this entity represents configuration data, not operational state."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.neighbor is not None:
return True
if self.pseudowire_id is not None:
return True
if self.bd_pw_class is not None:
return True
if self.bd_pw_mpls_static_labels is not None and self.bd_pw_mpls_static_labels._has_data():
return True
if self.bd_pw_split_horizon is not None and self.bd_pw_split_horizon._has_data():
return True
if self.bd_pw_static_mac_addresses is not None and self.bd_pw_static_mac_addresses._has_data():
return True
if self.bdpw_storm_control_types is not None and self.bdpw_storm_control_types._has_data():
return True
if self.bridge_domain_backup_pseudowires is not None and self.bridge_domain_backup_pseudowires._has_data():
return True
if self.pseudowire_dai is not None and self.pseudowire_dai._has_data():
return True
if self.pseudowire_flooding is not None:
return True
if self.pseudowire_flooding_unknown_unicast is not None:
return True
if self.pseudowire_igmp_snoop is not None:
return True
if self.pseudowire_ip_source_guard is not None and self.pseudowire_ip_source_guard._has_data():
return True
if self.pseudowire_mac is not None and self.pseudowire_mac._has_data():
return True
if self.pseudowire_mld_snoop is not None:
return True
if self.pseudowire_profile is not None and self.pseudowire_profile._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Look up generated metadata for this bd-pseudowire entry.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
    return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
@property
def _common_path(self):
    # Container path is the parent path plus this node's qualified name.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pseudowires'
def is_config(self):
    """Return True: this entity represents configuration data, not operational state."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.bd_pseudowire is not None:
for child_ref in self.bd_pseudowire:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Look up generated metadata for the bd-pseudowires container.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
    return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires']['meta_info']
class Vfis(object):
"""
Specify the virtual forwarding interface
name
.. attribute:: vfi
Name of the Virtual Forwarding Interface
**type**\: list of :py:class:`Vfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # YANG list of VFI entries keyed by name; the YList is re-parented to us.
    vfi_list = YList()
    vfi_list.parent = self
    vfi_list.name = 'vfi'
    self.vfi = vfi_list
class Vfi(object):
"""
Name of the Virtual Forwarding Interface
.. attribute:: name <key>
Name of the Virtual Forwarding Interface
**type**\: str
**length:** 0..32
.. attribute:: bgp_auto_discovery
Enable Autodiscovery BGP in this VFI
**type**\: :py:class:`BgpAutoDiscovery <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery>`
.. attribute:: multicast_p2mp
Enable Multicast P2MP in this VFI
**type**\: :py:class:`MulticastP2Mp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp>`
.. attribute:: vfi_pseudowires
List of pseudowires
**type**\: :py:class:`VfiPseudowires <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires>`
.. attribute:: vfi_shutdown
Enabling Shutdown
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpnid
VPN Identifier
**type**\: int
**range:** 1..4294967295
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # Leaf nodes start unset.
    self.name = None
    self.vfi_shutdown = None
    self.vpnid = None
    # Child containers are always instantiated and re-parented here.
    bgp_ad = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery()
    bgp_ad.parent = self
    self.bgp_auto_discovery = bgp_ad
    p2mp = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp()
    p2mp.parent = self
    self.multicast_p2mp = p2mp
    pws = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires()
    pws.parent = self
    self.vfi_pseudowires = pws
class MulticastP2Mp(object):
    """
    Enable Multicast P2MP in this VFI.

    .. attribute:: enable
        Enable Autodiscovery P2MP (Empty leaf)
    .. attribute:: signalings
        Multicast P2MP Signaling Type (Signalings container)
    .. attribute:: transports
        Multicast P2MP Transport (Transports container)
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.enable = None
        # Child containers are always instantiated and re-parented here.
        sig = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings()
        sig.parent = self
        self.signalings = sig
        trans = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports()
        trans.parent = self
        self.transports = trans

    class Transports(object):
        """
        Multicast P2MP Transport.

        .. attribute:: transport
            list of Transport entries keyed by transport_name
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            transport_list = YList()
            transport_list.parent = self
            transport_list.name = 'transport'
            self.transport = transport_list

        class Transport(object):
            """
            Multicast P2MP Transport Type.

            .. attribute:: transport_name  <key>
                Transport Type (str, pattern (RSVP_TE))
            .. attribute:: attribute_set_name
                Multicast P2MP TE Attribute Set Name (str, length 0..64)
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.transport_name = None
                self.attribute_set_name = None

            @property
            def _common_path(self):
                # Both a parent anchor and the list key are needed to build the XPath.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.transport_name is None:
                    raise YPYModelError('Key property transport_name is None')
                key_predicate = '[Cisco-IOS-XR-l2vpn-cfg:transport-name = ' + str(self.transport_name) + ']'
                return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:transport' + key_predicate

            def is_config(self):
                """Return True: this entity represents configuration data, not operational state."""
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return any(
                    leaf is not None
                    for leaf in (self.transport_name, self.attribute_set_name)
                )

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
                return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:transports'

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            entries = self.transport
            if entries is not None:
                return any(entry._has_data() for entry in entries)
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info']

    class Signalings(object):
        """
        Multicast P2MP Signaling Type.

        .. attribute:: signaling
            list of Signaling entries keyed by signaling_name
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            signaling_list = YList()
            signaling_list.parent = self
            signaling_list.name = 'signaling'
            self.signaling = signaling_list

        class Signaling(object):
            """
            Multicast P2MP Signaling Type entry.

            .. attribute:: signaling_name  <key>
                Signaling Type (str, pattern (BGP))
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.signaling_name = None

            @property
            def _common_path(self):
                # Both a parent anchor and the list key are needed to build the XPath.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                if self.signaling_name is None:
                    raise YPYModelError('Key property signaling_name is None')
                key_predicate = '[Cisco-IOS-XR-l2vpn-cfg:signaling-name = ' + str(self.signaling_name) + ']'
                return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:signaling' + key_predicate

            def is_config(self):
                """Return True: this entity represents configuration data, not operational state."""
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return self.signaling_name is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
                return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:signalings'

        def is_config(self):
            """Return True: this entity represents configuration data, not operational state."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            entries = self.signaling
            if entries is not None:
                return any(entry._has_data() for entry in entries)
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:multicast-p2mp'

    def is_config(self):
        """Return True: this entity represents configuration data, not operational state."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.enable is not None:
            return True
        return any(
            child is not None and child._has_data()
            for child in (self.signalings, self.transports)
        )

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
class VfiPseudowires(object):
"""
List of pseudowires
.. attribute:: vfi_pseudowire
Pseudowire configuration
**type**\: list of :py:class:`VfiPseudowire <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # YANG list of VFI pseudowire entries; the YList is re-parented to us.
    pw_list = YList()
    pw_list.parent = self
    pw_list.name = 'vfi_pseudowire'
    self.vfi_pseudowire = pw_list
class VfiPseudowire(object):
"""
Pseudowire configuration
.. attribute:: neighbor <key>
Neighbor IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: pseudowire_id <key>
Pseudowire ID
**type**\: int
**range:** 1..4294967295
.. attribute:: pseudowire_static_mac_addresses
Static Mac Address Table
**type**\: :py:class:`PseudowireStaticMacAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses>`
.. attribute:: vfi_pw_class
PW class template name to use for this pseudowire
**type**\: str
**length:** 0..32
.. attribute:: vfi_pw_dhcp_snoop
Attach a DHCP Snooping profile
**type**\: :py:class:`VfiPwDhcpSnoop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop>`
.. attribute:: vfi_pw_igmp_snoop
Attach a IGMP Snooping profile
**type**\: str
**length:** 0..32
.. attribute:: vfi_pw_mld_snoop
Attach a MLD Snooping profile
**type**\: str
**length:** 0..32
.. attribute:: vfi_pw_mpls_static_labels
MPLS static labels
**type**\: :py:class:`VfiPwMplsStaticLabels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    self.parent = None
    # Leaf nodes start unset.
    self.neighbor = None
    self.pseudowire_id = None
    self.vfi_pw_class = None
    self.vfi_pw_igmp_snoop = None
    self.vfi_pw_mld_snoop = None
    # Child containers are always instantiated and re-parented here.
    static_macs = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses()
    static_macs.parent = self
    self.pseudowire_static_mac_addresses = static_macs
    dhcp_snoop = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop()
    dhcp_snoop.parent = self
    self.vfi_pw_dhcp_snoop = dhcp_snoop
    mpls_labels = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels()
    mpls_labels.parent = self
    self.vfi_pw_mpls_static_labels = mpls_labels
class VfiPwDhcpSnoop(object):
    """
    Attach a DHCP Snooping profile.

    Attributes:
        dhcp_snooping_id (str): Disable DHCP snooping.
        profile_id (InterfaceProfileEnum): Set the snooping profile.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.dhcp_snooping_id = None
        self.profile_id = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:vfi-pw-dhcp-snoop'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container has been set."""
        if not self.is_config():
            return False
        return any(
            leaf is not None
            for leaf in (self.dhcp_snooping_id, self.profile_id)
        )

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop']['meta_info']
class VfiPwMplsStaticLabels(object):
    """
    MPLS static labels.

    Attributes:
        local_static_label (int): Pseudowire local static label,
            range 16..1048575.
        remote_static_label (int): Pseudowire remote static label,
            range 16..1048575.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.local_static_label = None
        self.remote_static_label = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:vfi-pw-mpls-static-labels'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container has been set."""
        if not self.is_config():
            return False
        return any(
            leaf is not None
            for leaf in (self.local_static_label, self.remote_static_label)
        )

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels']['meta_info']
class PseudowireStaticMacAddresses(object):
    """
    Static Mac Address Table.

    Attributes:
        pseudowire_static_mac_address (list of PseudowireStaticMacAddress):
            Static Mac Address Configuration entries.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.pseudowire_static_mac_address = YList()
        self.pseudowire_static_mac_address.parent = self
        self.pseudowire_static_mac_address.name = 'pseudowire_static_mac_address'

    class PseudowireStaticMacAddress(object):
        """
        Static Mac Address Configuration.

        Attributes:
            address (str, key): Static MAC address, pattern
                ``[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}``.
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.address = None

        @property
        def _common_path(self):
            """Absolute keyed XPath of this list entry."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.address is None:
                raise YPYModelError('Key property address is None')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-static-mac-address[Cisco-IOS-XR-l2vpn-cfg:address = ' + str(self.address) + ']'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            """Return True when the key leaf has been set."""
            if not self.is_config():
                return False
            return self.address is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
            return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:pseudowire-static-mac-addresses'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any list entry reports data."""
        if not self.is_config():
            return False
        entries = self.pseudowire_static_mac_address
        if entries is not None:
            return any(entry._has_data() for entry in entries)
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info']
@property
def _common_path(self):
    # Absolute keyed XPath of this vfi-pseudowire list entry; both list
    # keys (neighbor, pseudowire-id) must be set before the path exists.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.neighbor is None:
        raise YPYModelError('Key property neighbor is None')
    if self.pseudowire_id is None:
        raise YPYModelError('Key property pseudowire_id is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vfi-pseudowire[Cisco-IOS-XR-l2vpn-cfg:neighbor = ' + str(self.neighbor) + '][Cisco-IOS-XR-l2vpn-cfg:pseudowire-id = ' + str(self.pseudowire_id) + ']'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any leaf is set or any container child reports data.
    if not self.is_config():
        return False
    if self.neighbor is not None:
        return True
    if self.pseudowire_id is not None:
        return True
    if self.pseudowire_static_mac_addresses is not None and self.pseudowire_static_mac_addresses._has_data():
        return True
    if self.vfi_pw_class is not None:
        return True
    if self.vfi_pw_dhcp_snoop is not None and self.vfi_pw_dhcp_snoop._has_data():
        return True
    if self.vfi_pw_igmp_snoop is not None:
        return True
    if self.vfi_pw_mld_snoop is not None:
        return True
    if self.vfi_pw_mpls_static_labels is not None and self.vfi_pw_mpls_static_labels._has_data():
        return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
@property
def _common_path(self):
    # Path of the vfi-pseudowires container under the parent VFI node.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vfi-pseudowires'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any vfi-pseudowire list entry carries data.
    if not self.is_config():
        return False
    if self.vfi_pseudowire is not None:
        for child_ref in self.vfi_pseudowire:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires']['meta_info']
class BgpAutoDiscovery(object):
    """
    Enable Autodiscovery BGP in this VFI
    .. attribute:: ad_control_word
    Enable control\-word for this VFI
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: bgp_route_policy
    Route policy
    **type**\: :py:class:`BgpRoutePolicy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy>`
    .. attribute:: bgp_signaling_protocol
    Enable Signaling Protocol BGP in this VFI
    **type**\: :py:class:`BgpSignalingProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol>`
    .. attribute:: enable
    Enable Autodiscovery BGP
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: ldp_signaling_protocol
    Signaling Protocol LDP in this VFI configuration
    **type**\: :py:class:`LdpSignalingProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol>`
    .. attribute:: route_distinguisher
    Route Distinguisher
    **type**\: :py:class:`RouteDistinguisher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher>`
    .. attribute:: route_targets
    Route Target
    **type**\: :py:class:`RouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets>`
    .. attribute:: table_policy
    Table Policy for installation of forwarding data to L2FIB
    **type**\: str
    """

    # YANG module identity this generated class derives from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Leaves default to None; container children are constructed
        # eagerly with .parent wired back to this node.
        self.parent = None
        self.ad_control_word = None
        self.bgp_route_policy = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy()
        self.bgp_route_policy.parent = self
        self.bgp_signaling_protocol = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol()
        self.bgp_signaling_protocol.parent = self
        self.enable = None
        self.ldp_signaling_protocol = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol()
        self.ldp_signaling_protocol.parent = self
        self.route_distinguisher = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher()
        self.route_distinguisher.parent = self
        self.route_targets = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets()
        self.route_targets.parent = self
        self.table_policy = None
class LdpSignalingProtocol(object):
    """
    Signaling Protocol LDP in this VFI
    configuration
    .. attribute:: enable
    Enable LDP as Signaling Protocol .Deletion of this object also causes deletion of all objects under LDPSignalingProtocol
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: flow_label_load_balance
    Enable Flow Label based load balancing
    **type**\: :py:class:`FlowLabelLoadBalance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance>`
    .. attribute:: vplsid
    VPLS ID
    **type**\: :py:class:`Vplsid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid>`
    """

    # YANG module identity this generated class derives from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Leaves default to None; container children are constructed
        # eagerly with .parent wired back to this node.
        self.parent = None
        self.enable = None
        self.flow_label_load_balance = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance()
        self.flow_label_load_balance.parent = self
        self.vplsid = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid()
        self.vplsid.parent = self
class Vplsid(object):
    """
    VPLS ID.

    Attributes:
        address (str): IPv4 address (dotted quad).
        address_index (int): Address index, range 0..32767.
        as_ (int): Two byte AS number, range 1..65535.
        as_index (int): AS index, range 0..4294967295.
        type (LdpVplsIdEnum): VPLS-ID type selector.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.address = None
        self.address_index = None
        self.as_ = None
        self.as_index = None
        self.type = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:vplsid'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container has been set."""
        if not self.is_config():
            return False
        leaves = (
            self.address,
            self.address_index,
            self.as_,
            self.as_index,
            self.type,
        )
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid']['meta_info']
class FlowLabelLoadBalance(object):
    """
    Enable Flow Label based load balancing.

    Attributes:
        flow_label (FlowLabelLoadBalanceEnum): Flow Label load balance type.
        static (Empty): Static Flow Label.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.flow_label = None
        self.static = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:flow-label-load-balance'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container has been set."""
        if not self.is_config():
            return False
        return any(
            leaf is not None for leaf in (self.flow_label, self.static)
        )

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance']['meta_info']
@property
def _common_path(self):
    # Path of the ldp-signaling-protocol container under bgp-auto-discovery.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:ldp-signaling-protocol'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when the enable leaf is set or any container child has data.
    if not self.is_config():
        return False
    if self.enable is not None:
        return True
    if self.flow_label_load_balance is not None and self.flow_label_load_balance._has_data():
        return True
    if self.vplsid is not None and self.vplsid._has_data():
        return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
class BgpRoutePolicy(object):
    """
    Route policy.

    Attributes:
        export (str): Export route policy.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.export = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bgp-route-policy'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when the export leaf has been set."""
        if not self.is_config():
            return False
        return self.export is not None

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy']['meta_info']
class RouteDistinguisher(object):
    """
    Route Distinguisher.

    Attributes:
        addr_index (int): Addr index, range 0..65535.
        address (str): IPv4 address (dotted quad).
        as_ (int): Two byte or 4 byte AS number, range 1..4294967295.
        as_index (int): AS:nn (hex or decimal format), range 0..4294967295.
        type (BgpRouteDistinguisherEnum): Router Distinguisher type selector.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.addr_index = None
        self.address = None
        self.as_ = None
        self.as_index = None
        self.type = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:route-distinguisher'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container has been set."""
        if not self.is_config():
            return False
        leaves = (
            self.addr_index,
            self.address,
            self.as_,
            self.as_index,
            self.type,
        )
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher']['meta_info']
class BgpSignalingProtocol(object):
    """
    Enable Signaling Protocol BGP in this
    VFI
    .. attribute:: enable
    Enable BGP as Signaling Protocol
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: flow_label_load_balance
    Enable Flow Label based load balancing
    **type**\: :py:class:`FlowLabelLoadBalance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance>`
    .. attribute:: ve_range
    Local Virtual Edge Block Configurable Range
    **type**\: int
    **range:** 11..100
    .. attribute:: veid
    Local Virtual Edge Identifier
    **type**\: int
    **range:** 1..16384
    """

    # YANG module identity this generated class derives from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Leaves default to None; the flow-label-load-balance container is
        # constructed eagerly with .parent wired back to this node.
        self.parent = None
        self.enable = None
        self.flow_label_load_balance = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance()
        self.flow_label_load_balance.parent = self
        self.ve_range = None
        self.veid = None
class FlowLabelLoadBalance(object):
    """
    Enable Flow Label based load balancing.

    Attributes:
        flow_label (FlowLabelLoadBalanceEnum): Flow Label load balance type.
        static (Empty): Static Flow Label.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.flow_label = None
        self.static = None

    @property
    def _common_path(self):
        """Absolute XPath of this container, built from the parent's path."""
        owner = self.parent
        if owner is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return owner._common_path + '/Cisco-IOS-XR-l2vpn-cfg:flow-label-load-balance'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container has been set."""
        if not self.is_config():
            return False
        return any(
            leaf is not None for leaf in (self.flow_label, self.static)
        )

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance']['meta_info']
@property
def _common_path(self):
    # Path of the bgp-signaling-protocol container under bgp-auto-discovery.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bgp-signaling-protocol'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any leaf is set or the flow-label child has data.
    if not self.is_config():
        return False
    if self.enable is not None:
        return True
    if self.flow_label_load_balance is not None and self.flow_label_load_balance._has_data():
        return True
    if self.ve_range is not None:
        return True
    if self.veid is not None:
        return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol']['meta_info']
class RouteTargets(object):
    """
    Route Target
    .. attribute:: route_target
    Name of the Route Target
    **type**\: list of :py:class:`RouteTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget>`
    """

    # YANG module identity this generated class derives from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # The route-target YANG list is backed by a YList that tracks its
        # owning node and its own name for path generation.
        self.parent = None
        self.route_target = YList()
        self.route_target.parent = self
        self.route_target.name = 'route_target'
class RouteTarget(object):
    """
    Name of the Route Target
    .. attribute:: format <key>
    Format of the route target
    **type**\: :py:class:`BgpRouteTargetFormatEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetFormatEnum>`
    .. attribute:: role <key>
    Role of the router target type
    **type**\: :py:class:`BgpRouteTargetRoleEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetRoleEnum>`
    .. attribute:: ipv4_address
    ipv4 address
    **type**\: list of :py:class:`Ipv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address>`
    .. attribute:: two_byte_as_or_four_byte_as
    two byte as or four byte as
    **type**\: list of :py:class:`TwoByteAsOrFourByteAs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs>`
    """

    # YANG module identity this generated class derives from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Keys (format, role) default to None; the two child YANG lists are
        # backed by YList instances that track owner and name.
        self.parent = None
        self.format = None
        self.role = None
        self.ipv4_address = YList()
        self.ipv4_address.parent = self
        self.ipv4_address.name = 'ipv4_address'
        self.two_byte_as_or_four_byte_as = YList()
        self.two_byte_as_or_four_byte_as.parent = self
        self.two_byte_as_or_four_byte_as.name = 'two_byte_as_or_four_byte_as'
class TwoByteAsOrFourByteAs(object):
    """
    two byte as or four byte as.

    Attributes:
        as_ (int, key): Two byte or 4 byte AS number, range 1..4294967295.
        as_index (int, key): AS:nn (hex or decimal format),
            range 0..4294967295.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.as_ = None
        self.as_index = None

    @property
    def _common_path(self):
        """Absolute keyed XPath of this list entry; both keys must be set."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.as_ is None:
            raise YPYModelError('Key property as_ is None')
        if self.as_index is None:
            raise YPYModelError('Key property as_index is None')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:two-byte-as-or-four-byte-as[Cisco-IOS-XR-l2vpn-cfg:as = ' + str(self.as_) + '][Cisco-IOS-XR-l2vpn-cfg:as-index = ' + str(self.as_index) + ']'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when either key leaf has been set."""
        if not self.is_config():
            return False
        return any(leaf is not None for leaf in (self.as_, self.as_index))

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs']['meta_info']
class Ipv4Address(object):
    """
    ipv4 address.

    Attributes:
        addr_index (int, key): Addr index, range 0..65535.
        address (str, key): IPv4 address (dotted quad).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.addr_index = None
        self.address = None

    @property
    def _common_path(self):
        """Absolute keyed XPath of this list entry; both keys must be set."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.addr_index is None:
            raise YPYModelError('Key property addr_index is None')
        if self.address is None:
            raise YPYModelError('Key property address is None')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:ipv4-address[Cisco-IOS-XR-l2vpn-cfg:addr-index = ' + str(self.addr_index) + '][Cisco-IOS-XR-l2vpn-cfg:address = ' + str(self.address) + ']'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when either key leaf has been set."""
        if not self.is_config():
            return False
        return any(leaf is not None for leaf in (self.addr_index, self.address))

    @staticmethod
    def _meta_info():
        # Lazy import keeps module load cheap and avoids import cycles.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta_mod
        return meta_mod._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address']['meta_info']
@property
def _common_path(self):
    # Absolute keyed XPath of this route-target list entry; both list
    # keys (format, role) must be set before the path exists.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.format is None:
        raise YPYModelError('Key property format is None')
    if self.role is None:
        raise YPYModelError('Key property role is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:route-target[Cisco-IOS-XR-l2vpn-cfg:format = ' + str(self.format) + '][Cisco-IOS-XR-l2vpn-cfg:role = ' + str(self.role) + ']'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when a key is set or any entry of either child list has data.
    if not self.is_config():
        return False
    if self.format is not None:
        return True
    if self.role is not None:
        return True
    if self.ipv4_address is not None:
        for child_ref in self.ipv4_address:
            if child_ref._has_data():
                return True
    if self.two_byte_as_or_four_byte_as is not None:
        for child_ref in self.two_byte_as_or_four_byte_as:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info']
@property
def _common_path(self):
    # Path of the route-targets container under bgp-auto-discovery.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:route-targets'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any route-target list entry carries data.
    if not self.is_config():
        return False
    if self.route_target is not None:
        for child_ref in self.route_target:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets']['meta_info']
@property
def _common_path(self):
    # Path of the bgp-auto-discovery container under the parent VFI node.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bgp-auto-discovery'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any leaf is set or any container child reports data.
    if not self.is_config():
        return False
    if self.ad_control_word is not None:
        return True
    if self.bgp_route_policy is not None and self.bgp_route_policy._has_data():
        return True
    if self.bgp_signaling_protocol is not None and self.bgp_signaling_protocol._has_data():
        return True
    if self.enable is not None:
        return True
    if self.ldp_signaling_protocol is not None and self.ldp_signaling_protocol._has_data():
        return True
    if self.route_distinguisher is not None and self.route_distinguisher._has_data():
        return True
    if self.route_targets is not None and self.route_targets._has_data():
        return True
    if self.table_policy is not None:
        return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
@property
def _common_path(self):
    # Absolute keyed XPath of this vfi list entry; the name key must be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.name is None:
        raise YPYModelError('Key property name is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vfi[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any leaf is set or any container child reports data.
    if not self.is_config():
        return False
    if self.name is not None:
        return True
    if self.bgp_auto_discovery is not None and self.bgp_auto_discovery._has_data():
        return True
    if self.multicast_p2mp is not None and self.multicast_p2mp._has_data():
        return True
    if self.vfi_pseudowires is not None and self.vfi_pseudowires._has_data():
        return True
    if self.vfi_shutdown is not None:
        return True
    if self.vpnid is not None:
        return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
@property
def _common_path(self):
    # Path of the vfis container under the parent bridge-domain node.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vfis'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any vfi list entry carries data.
    if not self.is_config():
        return False
    if self.vfi is not None:
        for child_ref in self.vfi:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    # Metadata lookup for this generated class (lazy import avoids cycles).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis']['meta_info']
class BdAttachmentCircuits(object):
    """
    Attachment Circuit table
    .. attribute:: bd_attachment_circuit
    Name of the Attachment Circuit
    **type**\: list of :py:class:`BdAttachmentCircuit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit>`
    """

    # YANG module identity this generated class derives from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # The bd-attachment-circuit YANG list is backed by a YList that
        # tracks its owning node and its own name for path generation.
        self.parent = None
        self.bd_attachment_circuit = YList()
        self.bd_attachment_circuit.parent = self
        self.bd_attachment_circuit.name = 'bd_attachment_circuit'
class BdAttachmentCircuit(object):
"""
Name of the Attachment Circuit
.. attribute:: name <key>
The name of the Attachment Circuit
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: bdac_storm_control_types
Storm Control
**type**\: :py:class:`BdacStormControlTypes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes>`
.. attribute:: interface_dai
L2 Interface Dynamic ARP Inspection
**type**\: :py:class:`InterfaceDai <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai>`
.. attribute:: interface_flooding
Enable or Disable Flooding
**type**\: :py:class:`InterfaceTrafficFloodEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterfaceTrafficFloodEnum>`
.. attribute:: interface_flooding_unknown_unicast
Enable or Disable Unknown Unicast Flooding
**type**\: :py:class:`InterfaceTrafficFloodEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.InterfaceTrafficFloodEnum>`
.. attribute:: interface_igmp_snoop
Attach a IGMP Snooping profile
**type**\: str
**length:** 0..32
.. attribute:: interface_ip_source_guard
IP Source Guard
**type**\: :py:class:`InterfaceIpSourceGuard <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard>`
.. attribute:: interface_mac
MAC configuration commands
**type**\: :py:class:`InterfaceMac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac>`
.. attribute:: interface_mld_snoop
Attach a MLD Snooping profile
**type**\: str
**length:** 0..32
.. attribute:: interface_profile
Attach a DHCP profile
**type**\: :py:class:`InterfaceProfile <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile>`
.. attribute:: split_horizon
Split Horizon
**type**\: :py:class:`SplitHorizon <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon>`
.. attribute:: static_mac_addresses
Static Mac Address Table
**type**\: :py:class:`StaticMacAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Initialize every leaf to None and wire each child container to self."""
    self.parent = None
    self.name = None
    # Alias the deeply nested generated class namespace once for readability.
    _ac = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit
    self.bdac_storm_control_types = _ac.BdacStormControlTypes()
    self.bdac_storm_control_types.parent = self
    self.interface_dai = _ac.InterfaceDai()
    self.interface_dai.parent = self
    self.interface_flooding = None
    self.interface_flooding_unknown_unicast = None
    self.interface_igmp_snoop = None
    self.interface_ip_source_guard = _ac.InterfaceIpSourceGuard()
    self.interface_ip_source_guard.parent = self
    self.interface_mac = _ac.InterfaceMac()
    self.interface_mac.parent = self
    self.interface_mld_snoop = None
    self.interface_profile = _ac.InterfaceProfile()
    self.interface_profile.parent = self
    self.split_horizon = _ac.SplitHorizon()
    self.split_horizon.parent = self
    self.static_mac_addresses = _ac.StaticMacAddresses()
    self.static_mac_addresses.parent = self
class InterfaceIpSourceGuard(object):
    """
    IP Source Guard configuration for this attachment circuit.

    Leaves (all default to None):
      disable  - Disable L2 Interface Dynamic IP source guard (Empty)
      enable   - Enable IP Source Guard (Empty)
      logging  - Logging Type (L2VpnLoggingEnum)
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.disable = None
        self.enable = None
        self.logging = None

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-ip-source-guard'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf of this node has been set."""
        if not self.is_config():
            return False
        return any(leaf is not None for leaf in (self.disable, self.enable, self.logging))

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard']['meta_info']
class InterfaceDai(object):
    """
    L2 Interface Dynamic ARP Inspection configuration.

    Leaves: disable (Empty), enable (Empty), logging (L2VpnLoggingEnum).
    Child container: interface_dai_address_validation (address validation knobs).
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.disable = None
        self.enable = None
        self.logging = None
        # Child container is always instantiated and back-linked to self.
        validation = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation()
        validation.parent = self
        self.interface_dai_address_validation = validation

    class InterfaceDaiAddressValidation(object):
        """
        Address Validation settings for Dynamic ARP Inspection.

        Leaves: destination_mac_verification, ipv4_verification,
        source_mac_verification (all L2VpnVerificationEnum) and enable (Empty).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.destination_mac_verification = None
            self.enable = None
            self.ipv4_verification = None
            self.source_mac_verification = None

        @property
        def _common_path(self):
            """Absolute data path, built on top of the parent node's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-dai-address-validation'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether any leaf of this node has been set."""
            if not self.is_config():
                return False
            leaves = (
                self.destination_mac_verification,
                self.enable,
                self.ipv4_verification,
                self.source_mac_verification,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation']['meta_info']

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-dai'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf or the validation child carries data."""
        if not self.is_config():
            return False
        if any(leaf is not None for leaf in (self.disable, self.enable, self.logging)):
            return True
        validation = self.interface_dai_address_validation
        return validation is not None and validation._has_data()

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai']['meta_info']
class InterfaceProfile(object):
    """
    Attach a DHCP profile to the attachment circuit.

    Leaves:
      dhcp_snooping_id - str; described as 'Disable DHCP snooping' in the
                         generated YANG description (wording per model)
      profile_id       - InterfaceProfileEnum; selects the snooping profile
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.dhcp_snooping_id = None
        self.profile_id = None

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-profile'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether either leaf of this node has been set."""
        if not self.is_config():
            return False
        return self.dhcp_snooping_id is not None or self.profile_id is not None

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile']['meta_info']
class BdacStormControlTypes(object):
    """
    Storm Control table: a list of BdacStormControlType entries keyed by sctype.
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YDK list container; parent/name wiring drives path and element naming.
        self.bdac_storm_control_type = YList()
        self.bdac_storm_control_type.parent = self
        self.bdac_storm_control_type.name = 'bdac_storm_control_type'

    class BdacStormControlType(object):
        """
        One Storm Control entry.

        Key leaf: sctype (StormControlEnum).
        Child container: storm_control_unit (rate configuration).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.sctype = None
            unit = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit()
            unit.parent = self
            self.storm_control_unit = unit

        class StormControlUnit(object):
            """
            Storm Control rate units. Per the model, kbits_per_sec
            (64..1280000, kbit/s) and pkts_per_sec (1..160000, packet/s)
            cannot be configured together.
            """
            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.kbits_per_sec = None
                self.pkts_per_sec = None

            @property
            def _common_path(self):
                """Absolute data path, built on top of the parent node's path."""
                parent = self.parent
                if parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:storm-control-unit'

            def is_config(self):
                """Return True: this node models configuration data."""
                return True

            def _has_data(self):
                """Report whether either rate leaf has been set."""
                if not self.is_config():
                    return False
                return self.kbits_per_sec is not None or self.pkts_per_sec is not None

            @staticmethod
            def _meta_info():
                # Function-local import, presumably to avoid a circular import - TODO confirm.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit']['meta_info']

        @property
        def _common_path(self):
            """Keyed absolute data path; requires parent and the sctype key."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.sctype is None:
                raise YPYModelError('Key property sctype is None')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bdac-storm-control-type[Cisco-IOS-XR-l2vpn-cfg:sctype = ' + str(self.sctype) + ']'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether the key or the unit child carries data."""
            if not self.is_config():
                return False
            if self.sctype is not None:
                return True
            unit = self.storm_control_unit
            return unit is not None and unit._has_data()

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType']['meta_info']

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bdac-storm-control-types'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any list entry carries data."""
        if not self.is_config():
            return False
        entries = self.bdac_storm_control_type
        return entries is not None and any(entry._has_data() for entry in entries)

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes']['meta_info']
class SplitHorizon(object):
    """
    Split Horizon container; wraps the split_horizon_group_id sub-container.
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        group = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId()
        group.parent = self
        self.split_horizon_group_id = group

    class SplitHorizonGroupId(object):
        """
        Split Horizon Group ID. Single leaf: enable (Empty) turns the
        split horizon group on.
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.enable = None

        @property
        def _common_path(self):
            """Absolute data path, built on top of the parent node's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:split-horizon-group-id'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether the enable leaf has been set."""
            if not self.is_config():
                return False
            return self.enable is not None

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId']['meta_info']

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:split-horizon'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether the group-id child carries data."""
        if not self.is_config():
            return False
        group = self.split_horizon_group_id
        return group is not None and group._has_data()

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon']['meta_info']
class StaticMacAddresses(object):
    """
    Static MAC Address Table: a list of StaticMacAddress entries keyed by address.
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YDK list container; parent/name wiring drives path and element naming.
        self.static_mac_address = YList()
        self.static_mac_address.parent = self
        self.static_mac_address.name = 'static_mac_address'

    class StaticMacAddress(object):
        """
        One static MAC entry. Key leaf: address (str, colon-separated
        hex pairs per the model's pattern).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.address = None

        @property
        def _common_path(self):
            """Keyed absolute data path; requires parent and the address key."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.address is None:
                raise YPYModelError('Key property address is None')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:static-mac-address[Cisco-IOS-XR-l2vpn-cfg:address = ' + str(self.address) + ']'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether the address key has been set."""
            if not self.is_config():
                return False
            return self.address is not None

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress']['meta_info']

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:static-mac-addresses'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any list entry carries data."""
        if not self.is_config():
            return False
        entries = self.static_mac_address
        return entries is not None and any(entry._has_data() for entry in entries)

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses']['meta_info']
class InterfaceMac(object):
    """
    MAC configuration commands for this attachment circuit.

    Leaves: interface_mac_learning (MacLearnEnum),
            interface_mac_port_down_flush (PortDownFlushEnum).
    Child containers: interface_mac_aging, interface_mac_limit,
            interface_mac_secure.
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Alias the deeply nested generated class namespace once.
        _mac = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac
        self.interface_mac_aging = _mac.InterfaceMacAging()
        self.interface_mac_aging.parent = self
        self.interface_mac_learning = None
        self.interface_mac_limit = _mac.InterfaceMacLimit()
        self.interface_mac_limit.parent = self
        self.interface_mac_port_down_flush = None
        self.interface_mac_secure = _mac.InterfaceMacSecure()
        self.interface_mac_secure.parent = self

    class InterfaceMacAging(object):
        """
        MAC aging configuration. Leaves: interface_mac_aging_time
        (int, 300..30000 per the model) and interface_mac_aging_type
        (MacAgingEnum).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.interface_mac_aging_time = None
            self.interface_mac_aging_type = None

        @property
        def _common_path(self):
            """Absolute data path, built on top of the parent node's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-mac-aging'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether either aging leaf has been set."""
            if not self.is_config():
                return False
            leaves = (self.interface_mac_aging_time, self.interface_mac_aging_type)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging']['meta_info']

    class InterfaceMacSecure(object):
        """
        MAC Secure configuration. Leaves: action (MacSecureActionEnum),
        disable (Empty), enable (Empty), logging (L2VpnLoggingEnum).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.action = None
            self.disable = None
            self.enable = None
            self.logging = None

        @property
        def _common_path(self):
            """Absolute data path, built on top of the parent node's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-mac-secure'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether any leaf of this node has been set."""
            if not self.is_config():
                return False
            leaves = (self.action, self.disable, self.enable, self.logging)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure']['meta_info']

    class InterfaceMacLimit(object):
        """
        MAC limit configuration. Leaves: interface_mac_limit_action
        (MacLimitActionEnum), interface_mac_limit_max (int,
        0..4294967295 per the model), interface_mac_limit_notif
        (MacNotificationEnum).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.interface_mac_limit_action = None
            self.interface_mac_limit_max = None
            self.interface_mac_limit_notif = None

        @property
        def _common_path(self):
            """Absolute data path, built on top of the parent node's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-mac-limit'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether any limit leaf has been set."""
            if not self.is_config():
                return False
            leaves = (
                self.interface_mac_limit_action,
                self.interface_mac_limit_max,
                self.interface_mac_limit_notif,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit']['meta_info']

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:interface-mac'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf or child container carries data."""
        if not self.is_config():
            return False
        if self.interface_mac_learning is not None:
            return True
        if self.interface_mac_port_down_flush is not None:
            return True
        children = (
            self.interface_mac_aging,
            self.interface_mac_limit,
            self.interface_mac_secure,
        )
        for child in children:
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
@property
def _common_path(self):
    # XPath of this keyed list entry; both the parent path and the
    # 'name' key must be present before the path can be formed.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.name is None:
        raise YPYModelError('Key property name is None')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bd-attachment-circuit[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True
def _has_data(self):
    # Data is present when any leaf is non-None or any child container
    # reports data of its own.
    if not self.is_config():
        return False
    if self.name is not None:
        return True
    if self.bdac_storm_control_types is not None and self.bdac_storm_control_types._has_data():
        return True
    if self.interface_dai is not None and self.interface_dai._has_data():
        return True
    if self.interface_flooding is not None:
        return True
    if self.interface_flooding_unknown_unicast is not None:
        return True
    if self.interface_igmp_snoop is not None:
        return True
    if self.interface_ip_source_guard is not None and self.interface_ip_source_guard._has_data():
        return True
    if self.interface_mac is not None and self.interface_mac._has_data():
        return True
    if self.interface_mld_snoop is not None:
        return True
    if self.interface_profile is not None and self.interface_profile._has_data():
        return True
    if self.split_horizon is not None and self.split_horizon._has_data():
        return True
    if self.static_mac_addresses is not None and self.static_mac_addresses._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    # Schema metadata lookup; the import is function-local, presumably
    # to avoid a circular import at module load time - TODO confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
@property
def _common_path(self):
    # Path of this container is the parent's path plus a fixed segment.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:bd-attachment-circuits'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True
def _has_data(self):
    # True when any attachment-circuit list entry reports data.
    if not self.is_config():
        return False
    if self.bd_attachment_circuit is not None:
        for child_ref in self.bd_attachment_circuit:
            if child_ref._has_data():
                return True
    return False
@staticmethod
def _meta_info():
    # Schema metadata lookup; the import is function-local, presumably
    # to avoid a circular import at module load time - TODO confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits']['meta_info']
class BdPseudowireEvpns(object):
    """
    List of EVPN pseudowires: BdPseudowireEvpn entries keyed by (acid, eviid).
    """
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YDK list container; parent/name wiring drives path and element naming.
        self.bd_pseudowire_evpn = YList()
        self.bd_pseudowire_evpn.parent = self
        self.bd_pseudowire_evpn.name = 'bd_pseudowire_evpn'

    class BdPseudowireEvpn(object):
        """
        One EVPN pseudowire entry. Key leaves: acid (AC ID, int
        1..4294967295 per the model) and eviid (Ethernet VPN ID, int
        1..65534 per the model).
        """
        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.acid = None
            self.eviid = None

        @property
        def _common_path(self):
            """Keyed absolute data path; requires parent plus both keys."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.acid is None:
                raise YPYModelError('Key property acid is None')
            if self.eviid is None:
                raise YPYModelError('Key property eviid is None')
            return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pseudowire-evpn[Cisco-IOS-XR-l2vpn-cfg:acid = ' + str(self.acid) + '][Cisco-IOS-XR-l2vpn-cfg:eviid = ' + str(self.eviid) + ']'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether either key leaf has been set."""
            if not self.is_config():
                return False
            return self.acid is not None or self.eviid is not None

        @staticmethod
        def _meta_info():
            # Function-local import, presumably to avoid a circular import - TODO confirm.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn']['meta_info']

    @property
    def _common_path(self):
        """Absolute data path, built on top of the parent node's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bd-pseudowire-evpns'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any pseudowire list entry carries data."""
        if not self.is_config():
            return False
        entries = self.bd_pseudowire_evpn
        return entries is not None and any(entry._has_data() for entry in entries)

    @staticmethod
    def _meta_info():
        # Function-local import, presumably to avoid a circular import - TODO confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns']['meta_info']
class IpSourceGuard(object):
    """
    IP Source Guard configuration container.

    .. attribute:: enable
    	Enable IP Source Guard
    	**type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: logging
    	Enable Logging
    	**type**\: :py:class:`Empty<ydk.types.Empty>`
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.enable = None
        self.logging = None

    @property
    def _common_path(self):
        """XPath of the ip-source-guard container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:ip-source-guard'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when either presence leaf is populated."""
        if not self.is_config():
            return False
        return any(leaf is not None for leaf in (self.enable, self.logging))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard']['meta_info']
class Dai(object):
    """
    Dynamic ARP Inspection configuration.

    .. attribute:: dai_address_validation
    	Address Validation container
    	**type**\: :py:class:`DaiAddressValidation`

    .. attribute:: enable
    	Enable Dynamic ARP Inspection
    	**type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: logging
    	Enable Logging
    	**type**\: :py:class:`Empty<ydk.types.Empty>`
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container created eagerly and linked back to this node.
        self.dai_address_validation = L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation()
        self.dai_address_validation.parent = self
        self.enable = None
        self.logging = None

    class DaiAddressValidation(object):
        """
        Address validation options for Dynamic ARP Inspection:
        destination/source MAC verification, IPv4 verification and an
        enable presence leaf (all Empty leaves).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.destination_mac_verification = None
            self.enable = None
            self.ipv4_verification = None
            self.source_mac_verification = None

        @property
        def _common_path(self):
            """XPath of the dai-address-validation container."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:dai-address-validation'

        def is_config(self):
            """Indicate that this node models configuration data."""
            return True

        def _has_data(self):
            """Return True when any validation leaf is populated."""
            if not self.is_config():
                return False
            leaves = (
                self.destination_mac_verification,
                self.enable,
                self.ipv4_verification,
                self.source_mac_verification,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation']['meta_info']

    @property
    def _common_path(self):
        """XPath of the dai container under its parent bridge domain."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:dai'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when a leaf or the nested validation container is set."""
        if not self.is_config():
            return False
        if self.dai_address_validation is not None and self.dai_address_validation._has_data():
            return True
        return self.enable is not None or self.logging is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai']['meta_info']
class RoutedInterfaces(object):
    """
    Bridge Domain Routed Interface Table.

    .. attribute:: routed_interface
    	Bridge Domain Routed Interface
    	**type**\: list of :py:class:`RoutedInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface>`
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YANG list of routed interfaces; YList tracks parent/name.
        iface_list = YList()
        iface_list.parent = self
        iface_list.name = 'routed_interface'
        self.routed_interface = iface_list

    class RoutedInterface(object):
        """
        A single routed interface, keyed by interface name (an IOS XR
        interface-name string, e.g. ``GigabitEthernet0/0/0/1``).
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.interface_name = None

        @property
        def _common_path(self):
            """XPath of this entry; the interface-name key must be set."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.interface_name is None:
                raise YPYModelError('Key property interface_name is None')
            keyed = '/Cisco-IOS-XR-l2vpn-cfg:routed-interface[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'
            return self.parent._common_path + keyed

        def is_config(self):
            """Indicate that this node models configuration data."""
            return True

        def _has_data(self):
            """Return True when the key leaf is populated."""
            if not self.is_config():
                return False
            return self.interface_name is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface']['meta_info']

    @property
    def _common_path(self):
        """XPath of the routed-interfaces container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:routed-interfaces'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when any routed-interface entry holds data."""
        if not self.is_config():
            return False
        entries = self.routed_interface
        if entries is None:
            return False
        return any(entry._has_data() for entry in entries)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces']['meta_info']
@property
def _common_path(self):
    """XPath of this bridge domain, keyed by name, under the parent list."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.name is None:
        raise YPYModelError('Key property name is None')
    keyed = '/Cisco-IOS-XR-l2vpn-cfg:bridge-domain[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'
    return self.parent._common_path + keyed
def is_config(self):
    """This model node represents configuration data, so always True."""
    return True
def _has_data(self):
    """Return True when this bridge domain has any leaf set or any child
    container reporting data of its own."""
    if not self.is_config():
        return False
    # Scalar leaves: populated when not None.
    leaves = (
        self.name,
        self.bridge_domain_mtu,
        self.coupled_mode,
        self.dhcp,
        self.flooding,
        self.flooding_unknown_unicast,
        self.igmp_snooping,
        self.igmp_snooping_disable,
        self.mld_snooping,
        self.shutdown,
        self.transport_mode,
    )
    if any(leaf is not None for leaf in leaves):
        return True
    # Child containers: delegate to their own _has_data().
    containers = (
        self.bd_attachment_circuits,
        self.bd_pseudowire_evpns,
        self.bd_pseudowires,
        self.bd_storm_controls,
        self.bridge_domain_evis,
        self.bridge_domain_mac,
        self.bridge_domain_pbb,
        self.dai,
        self.ip_source_guard,
        self.member_vnis,
        self.nv_satellite,
        self.routed_interfaces,
        self.vfis,
    )
    return any(child is not None and child._has_data() for child in containers)
@staticmethod
def _meta_info():
    """Look up the generated meta-information entry for this node."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    table_key = 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain'
    return meta._meta_table[table_key]['meta_info']
@property
def _common_path(self):
    """XPath of the bridge-domains container under its parent group."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:bridge-domains'
def is_config(self):
    """Configuration-data node; always reports True."""
    return True
def _has_data(self):
    """Return True when any child bridge-domain entry carries data."""
    if not self.is_config():
        return False
    domains = self.bridge_domain
    if domains is None:
        return False
    return any(domain._has_data() for domain in domains)
@staticmethod
def _meta_info():
    """Look up the generated meta-information entry for this node."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    table_key = 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains'
    return meta._meta_table[table_key]['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this bridge-domain group, keyed by its name."""
    if self.name is None:
        raise YPYModelError('Key property name is None')
    base = '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-groups'
    return base + '/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-group[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'
def is_config(self):
    """Configuration-data node; always reports True."""
    return True
def _has_data(self):
    """Return True when the group name or its bridge-domains child is set."""
    if not self.is_config():
        return False
    if self.name is not None:
        return True
    return self.bridge_domains is not None and self.bridge_domains._has_data()
@staticmethod
def _meta_info():
    """Look up the generated meta-information entry for this node."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    table_key = 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup'
    return meta._meta_table[table_key]['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the top-level bridge-domain-groups container."""
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:bridge-domain-groups'
def is_config(self):
    """Configuration-data node; always reports True."""
    return True
def _has_data(self):
    """Return True when any child bridge-domain-group entry carries data."""
    if not self.is_config():
        return False
    groups = self.bridge_domain_group
    if groups is None:
        return False
    return any(group._has_data() for group in groups)
@staticmethod
def _meta_info():
    """Look up the generated meta-information entry for this node."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    table_key = 'L2Vpn.Database.BridgeDomainGroups'
    return meta._meta_table[table_key]['meta_info']
class PseudowireClasses(object):
"""
List of pseudowire classes
.. attribute:: pseudowire_class
Pseudowire class
**type**\: list of :py:class:`PseudowireClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Back-reference to the enclosing container node; set by the parent.
    self.parent = None
    # YANG list of pseudowire classes; YList keeps parent/name bookkeeping
    # used when the model is serialized.
    self.pseudowire_class = YList()
    self.pseudowire_class.parent = self
    self.pseudowire_class.name = 'pseudowire_class'
class PseudowireClass(object):
"""
Pseudowire class
.. attribute:: name <key>
Name of the pseudowire class
**type**\: str
**length:** 0..32
.. attribute:: backup_disable_delay
Back Up Pseudowire class
**type**\: :py:class:`BackupDisableDelay <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay>`
.. attribute:: enable
Enable pseudowire class
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: l2tpv3_encapsulation
L2TPv3 encapsulation
**type**\: :py:class:`L2Tpv3Encapsulation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation>`
.. attribute:: mac_withdraw
Enable backup MAC withdraw
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: mpls_encapsulation
MPLS encapsulation
**type**\: :py:class:`MplsEncapsulation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Back-reference to the enclosing PseudowireClasses node.
    self.parent = None
    # YANG key leaf: name of this pseudowire class (str, length 0..32).
    self.name = None
    # Child containers are created eagerly and linked back to this node.
    self.backup_disable_delay = L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay()
    self.backup_disable_delay.parent = self
    self.enable = None
    self.l2tpv3_encapsulation = L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation()
    self.l2tpv3_encapsulation.parent = self
    self.mac_withdraw = None
    self.mpls_encapsulation = L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation()
    self.mpls_encapsulation.parent = self
class L2Tpv3Encapsulation(object):
    """
    L2TPv3 encapsulation settings for a pseudowire class.

    Leaves: ``cookie_size`` (L2TpCookieSizeEnum, default ``zero``),
    ``df_bit_set`` (Empty), ``enable`` (Empty), ``source_address``
    (IPv4 string), ``time_to_live`` (int 1..255) and ``transport_mode``
    (TransportModeEnum).  Child containers: ``path_mtu``, ``sequencing``,
    ``signaling_protocol`` and ``type_of_service``.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.cookie_size = None
        self.df_bit_set = None
        self.enable = None
        # Child containers are created eagerly and linked back to self.
        self.path_mtu = L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu()
        self.path_mtu.parent = self
        self.sequencing = L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing()
        self.sequencing.parent = self
        self.signaling_protocol = L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol()
        self.signaling_protocol.parent = self
        self.source_address = None
        self.time_to_live = None
        self.transport_mode = None
        self.type_of_service = L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService()
        self.type_of_service.parent = self

    class Sequencing(object):
        """Sequencing options: ``resync_threshold`` (5..65535, default 5)
        and ``sequencing`` mode (L2Tpv3SequencingEnum, default ``off``)."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.resync_threshold = None
            self.sequencing = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:sequencing'

        def is_config(self):
            """Indicate that this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None for v in (self.resync_threshold, self.sequencing))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing']['meta_info']

    class TypeOfService(object):
        """Type of service: ``type_of_service_mode`` (enum) and
        ``type_of_service_value`` (int 0..255)."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.type_of_service_mode = None
            self.type_of_service_value = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:type-of-service'

        def is_config(self):
            """Indicate that this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None for v in (self.type_of_service_mode, self.type_of_service_value))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService']['meta_info']

    class SignalingProtocol(object):
        """L2TPv3 signaling protocol: ``l2tpv3_class_name`` (str, length
        0..32) and ``protocol`` (L2TpSignalingProtocolEnum, default
        ``l2tpv3``)."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.l2tpv3_class_name = None
            self.protocol = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:signaling-protocol'

        def is_config(self):
            """Indicate that this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None for v in (self.l2tpv3_class_name, self.protocol))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol']['meta_info']

    class PathMtu(object):
        """Path MTU: ``enable`` presence leaf and ``max_path_mtu``
        (int 68..65535)."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.enable = None
            self.max_path_mtu = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:path-mtu'

        def is_config(self):
            """Indicate that this node models configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return any(v is not None for v in (self.enable, self.max_path_mtu))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu']['meta_info']

    @property
    def _common_path(self):
        """XPath of the l2tpv3-encapsulation container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:l2tpv3-encapsulation'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf is set or any child container
        reports data of its own."""
        if not self.is_config():
            return False
        leaves = (self.cookie_size, self.df_bit_set, self.enable,
                  self.source_address, self.time_to_live, self.transport_mode)
        if any(v is not None for v in leaves):
            return True
        containers = (self.path_mtu, self.sequencing,
                      self.signaling_protocol, self.type_of_service)
        return any(c is not None and c._has_data() for c in containers)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
class BackupDisableDelay(object):
    """
    Backup pseudowire disable behavior.

    ``disable_backup``: delay in seconds (int 0..180);
    ``type``: delay-or-never selector (BackupDisableEnum).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.disable_backup = None
        self.type = None

    @property
    def _common_path(self):
        """XPath of the backup-disable-delay container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:backup-disable-delay'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when either leaf is populated."""
        if not self.is_config():
            return False
        return any(v is not None for v in (self.disable_backup, self.type))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay']['meta_info']
class MplsEncapsulation(object):
"""
MPLS encapsulation
.. attribute:: control_word
Enable control word
**type**\: :py:class:`ControlWordEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.ControlWordEnum>`
.. attribute:: enable
Enable MPLS encapsulation
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: load_balance_group
Load Balancing
**type**\: :py:class:`LoadBalanceGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup>`
.. attribute:: mpls_redundancy
Redundancy options for MPLS encapsulation
**type**\: :py:class:`MplsRedundancy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy>`
.. attribute:: preferred_path
Preferred path
**type**\: :py:class:`PreferredPath <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath>`
.. attribute:: pw_switching_tlv
Pseudowire Switching Point Tlv
**type**\: :py:class:`PwSwitchingPointTlvEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.PwSwitchingPointTlvEnum>`
.. attribute:: sequencing
Sequencing
**type**\: :py:class:`Sequencing <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing>`
.. attribute:: signaling_protocol
MPLS signaling protocol
**type**\: :py:class:`MplsSignalingProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.MplsSignalingProtocolEnum>`
**default value**\: ldp
.. attribute:: source_address
Source IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: static_tag_rewrite
Static Tag rewrite
**type**\: int
**range:** 1..4094
.. attribute:: transport_mode
Transport mode
**type**\: :py:class:`TransportModeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.TransportModeEnum>`
.. attribute:: vccv_type
VCCV verification type
**type**\: :py:class:`VccvVerificationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.VccvVerificationEnum>`
**default value**\: lsp-ping
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Back-reference to the enclosing PseudowireClass node.
    self.parent = None
    # Leaf: control-word negotiation behavior (ControlWordEnum).
    self.control_word = None
    # Leaf: presence flag enabling MPLS encapsulation.
    self.enable = None
    # Child containers are created eagerly and linked back to this node.
    self.load_balance_group = L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup()
    self.load_balance_group.parent = self
    self.mpls_redundancy = L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy()
    self.mpls_redundancy.parent = self
    self.preferred_path = L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath()
    self.preferred_path.parent = self
    # Leaf: pseudowire switching point TLV behavior (enum).
    self.pw_switching_tlv = None
    self.sequencing = L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing()
    self.sequencing.parent = self
    # Remaining scalar leaves (enums / str / int per the class docstring).
    self.signaling_protocol = None
    self.source_address = None
    self.static_tag_rewrite = None
    self.transport_mode = None
    self.vccv_type = None
class Sequencing(object):
    """
    Sequencing options for MPLS encapsulation.

    ``resync_threshold``: out-of-sequence threshold (int 5..65535,
    default 5); ``sequencing``: mode (MplsSequencingEnum, default off).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.resync_threshold = None
        self.sequencing = None

    @property
    def _common_path(self):
        """XPath of the sequencing container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:sequencing'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when either leaf is populated."""
        if not self.is_config():
            return False
        return any(v is not None for v in (self.resync_threshold, self.sequencing))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing']['meta_info']
class MplsRedundancy(object):
    """
    Redundancy options for MPLS encapsulation.

    ``redundancy_initial_delay``: seconds before activating the
    redundant PW (int 0..120); ``redundancy_one_way``: presence leaf
    forcing one-way PW redundancy in a redundancy group.
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.redundancy_initial_delay = None
        self.redundancy_one_way = None

    @property
    def _common_path(self):
        """XPath of the mpls-redundancy container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:mpls-redundancy'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when either leaf is populated."""
        if not self.is_config():
            return False
        return any(v is not None for v in (self.redundancy_initial_delay, self.redundancy_one_way))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy']['meta_info']
class PreferredPath(object):
    """
    Preferred path settings for MPLS encapsulation.

    ``fallback_disable``: presence leaf; ``interface_tunnel_number``:
    tunnel interface number (int 0..65535); ``type``: path type
    (PreferredPathEnum).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.fallback_disable = None
        self.interface_tunnel_number = None
        self.type = None

    @property
    def _common_path(self):
        """XPath of the preferred-path container under its parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-l2vpn-cfg:preferred-path'

    def is_config(self):
        """Indicate that this node models configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf is populated."""
        if not self.is_config():
            return False
        leaves = (self.fallback_disable, self.interface_tunnel_number, self.type)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath']['meta_info']
class LoadBalanceGroup(object):
    """
    Load Balancing
    .. attribute:: flow_label_load_balance
    Enable Flow Label based load balancing
    **type**\: :py:class:`FlowLabelLoadBalance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance>`
    .. attribute:: flow_label_load_balance_code
    Enable Legacy Flow Label TLV code
    **type**\: :py:class:`FlowLabelTlvCodeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.FlowLabelTlvCodeEnum>`
    .. attribute:: pw_label_load_balance
    Enable PW Label based Load Balancing
    **type**\: :py:class:`LoadBalanceEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.LoadBalanceEnum>`
    """

    # YANG module prefix / revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated; presence is reported via _has_data().
        self.flow_label_load_balance = L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance()
        self.flow_label_load_balance.parent = self
        self.flow_label_load_balance_code = None
        self.pw_label_load_balance = None

    class FlowLabelLoadBalance(object):
        """
        Enable Flow Label based load balancing
        .. attribute:: flow_label
        Flow Label load balance type
        **type**\: :py:class:`FlowLabelLoadBalanceEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.FlowLabelLoadBalanceEnum>`
        .. attribute:: static
        Static Flow Label
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.flow_label = None
            self.static = None

        @property
        def _common_path(self):
            # Path is derived from the enclosing load-balance-group node.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:flow-label-load-balance'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when any leaf has been set.
            if not self.is_config():
                return False
            if self.flow_label is not None:
                return True
            if self.static is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:load-balance-group'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Child containers count only when they themselves hold data.
        if not self.is_config():
            return False
        if self.flow_label_load_balance is not None and self.flow_label_load_balance._has_data():
            return True
        if self.flow_label_load_balance_code is not None:
            return True
        if self.pw_label_load_balance is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup']['meta_info']
@property
def _common_path(self):
    # Absolute XPath of the mpls-encapsulation node, derived from the parent.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:mpls-encapsulation'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Configuration model: always True.
    return True
def _has_data(self):
    # True when any leaf is set or any child container reports data.
    if not self.is_config():
        return False
    if self.control_word is not None:
        return True
    if self.enable is not None:
        return True
    if self.load_balance_group is not None and self.load_balance_group._has_data():
        return True
    if self.mpls_redundancy is not None and self.mpls_redundancy._has_data():
        return True
    if self.preferred_path is not None and self.preferred_path._has_data():
        return True
    if self.pw_switching_tlv is not None:
        return True
    if self.sequencing is not None and self.sequencing._has_data():
        return True
    if self.signaling_protocol is not None:
        return True
    if self.source_address is not None:
        return True
    if self.static_tag_rewrite is not None:
        return True
    if self.transport_mode is not None:
        return True
    if self.vccv_type is not None:
        return True
    return False
@staticmethod
def _meta_info():
    # Deferred import of the generated meta table.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
@property
def _common_path(self):
    # Keyed list entry: path embeds the 'name' key, so the key must be set.
    if self.name is None:
        raise YPYModelError('Key property name is None')
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:pseudowire-classes/Cisco-IOS-XR-l2vpn-cfg:pseudowire-class[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Configuration model: always True.
    return True
def _has_data(self):
    # A list entry has data as soon as its key (name) is set,
    # or when any leaf / child container holds data.
    if not self.is_config():
        return False
    if self.name is not None:
        return True
    if self.backup_disable_delay is not None and self.backup_disable_delay._has_data():
        return True
    if self.enable is not None:
        return True
    if self.l2tpv3_encapsulation is not None and self.l2tpv3_encapsulation._has_data():
        return True
    if self.mac_withdraw is not None:
        return True
    if self.mpls_encapsulation is not None and self.mpls_encapsulation._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
@property
def _common_path(self):
    # Fixed absolute path; this container has no list keys above it.
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:pseudowire-classes'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Configuration model: always True.
    return True
def _has_data(self):
    # True when at least one child list entry holds data.
    if not self.is_config():
        return False
    if self.pseudowire_class is not None:
        for child_ref in self.pseudowire_class:
            if child_ref._has_data():
                return True
    return False
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database.PseudowireClasses']['meta_info']
class FlexibleXconnectServiceTable(object):
    """
    List of Flexible XConnect Services
    .. attribute:: vlan_unaware_flexible_xconnect_services
    List of Vlan\-Unaware Flexible XConnect Services
    **type**\: :py:class:`VlanUnawareFlexibleXconnectServices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices>`
    """

    # YANG module prefix / revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated; presence via _has_data().
        self.vlan_unaware_flexible_xconnect_services = L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices()
        self.vlan_unaware_flexible_xconnect_services.parent = self

    class VlanUnawareFlexibleXconnectServices(object):
        """
        List of Vlan\-Unaware Flexible XConnect
        Services
        .. attribute:: vlan_unaware_flexible_xconnect_service
        Flexible XConnect Service
        **type**\: list of :py:class:`VlanUnawareFlexibleXconnectService <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # YList: ordered list of keyed child entries.
            self.vlan_unaware_flexible_xconnect_service = YList()
            self.vlan_unaware_flexible_xconnect_service.parent = self
            self.vlan_unaware_flexible_xconnect_service.name = 'vlan_unaware_flexible_xconnect_service'

        class VlanUnawareFlexibleXconnectService(object):
            """
            Flexible XConnect Service
            .. attribute:: name <key>
            Name of the Flexible XConnect Service
            **type**\: str
            **length:** 0..23
            .. attribute:: vlan_unaware_fxc_attachment_circuits
            List of attachment circuits
            **type**\: :py:class:`VlanUnawareFxcAttachmentCircuits <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits>`
            .. attribute:: vlan_unaware_fxc_pseudowire_evpns
            List of EVPN Services
            **type**\: :py:class:`VlanUnawareFxcPseudowireEvpns <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns>`
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.name = None  # list key
                self.vlan_unaware_fxc_attachment_circuits = L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits()
                self.vlan_unaware_fxc_attachment_circuits.parent = self
                self.vlan_unaware_fxc_pseudowire_evpns = L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns()
                self.vlan_unaware_fxc_pseudowire_evpns.parent = self

            class VlanUnawareFxcAttachmentCircuits(object):
                """
                List of attachment circuits
                .. attribute:: vlan_unaware_fxc_attachment_circuit
                Attachment circuit interface
                **type**\: list of :py:class:`VlanUnawareFxcAttachmentCircuit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.vlan_unaware_fxc_attachment_circuit = YList()
                    self.vlan_unaware_fxc_attachment_circuit.parent = self
                    self.vlan_unaware_fxc_attachment_circuit.name = 'vlan_unaware_fxc_attachment_circuit'

                class VlanUnawareFxcAttachmentCircuit(object):
                    """
                    Attachment circuit interface
                    .. attribute:: name <key>
                    Name of the attachment circuit interface
                    **type**\: str
                    **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
                    """

                    _prefix = 'l2vpn-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.name = None  # list key (interface name)

                    @property
                    def _common_path(self):
                        # Keyed entry under a keyed parent: both parent path and key required.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.name is None:
                            raise YPYModelError('Key property name is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-fxc-attachment-circuit[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        if self.name is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                        return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-fxc-attachment-circuits'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    # True when any list entry holds data.
                    if not self.is_config():
                        return False
                    if self.vlan_unaware_fxc_attachment_circuit is not None:
                        for child_ref in self.vlan_unaware_fxc_attachment_circuit:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits']['meta_info']

            class VlanUnawareFxcPseudowireEvpns(object):
                """
                List of EVPN Services
                .. attribute:: vlan_unaware_fxc_pseudowire_evpn
                EVPN FXC Service Configuration
                **type**\: list of :py:class:`VlanUnawareFxcPseudowireEvpn <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.vlan_unaware_fxc_pseudowire_evpn = YList()
                    self.vlan_unaware_fxc_pseudowire_evpn.parent = self
                    self.vlan_unaware_fxc_pseudowire_evpn.name = 'vlan_unaware_fxc_pseudowire_evpn'

                class VlanUnawareFxcPseudowireEvpn(object):
                    """
                    EVPN FXC Service Configuration
                    .. attribute:: acid <key>
                    AC ID
                    **type**\: int
                    **range:** 1..4294967295
                    .. attribute:: eviid <key>
                    Ethernet VPN ID
                    **type**\: int
                    **range:** 1..65534
                    """

                    _prefix = 'l2vpn-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.acid = None  # list key
                        self.eviid = None  # list key

                    @property
                    def _common_path(self):
                        # Two-key list entry: both keys must be set to build the path.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.acid is None:
                            raise YPYModelError('Key property acid is None')
                        if self.eviid is None:
                            raise YPYModelError('Key property eviid is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-fxc-pseudowire-evpn[Cisco-IOS-XR-l2vpn-cfg:acid = ' + str(self.acid) + '][Cisco-IOS-XR-l2vpn-cfg:eviid = ' + str(self.eviid) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        if not self.is_config():
                            return False
                        if self.acid is not None:
                            return True
                        if self.eviid is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                        return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-fxc-pseudowire-evpns'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    # True when any list entry holds data.
                    if not self.is_config():
                        return False
                    if self.vlan_unaware_fxc_pseudowire_evpn is not None:
                        for child_ref in self.vlan_unaware_fxc_pseudowire_evpn:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns']['meta_info']

            @property
            def _common_path(self):
                # Absolute path embedding the 'name' list key.
                if self.name is None:
                    raise YPYModelError('Key property name is None')
                return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:flexible-xconnect-service-table/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-flexible-xconnect-services/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-flexible-xconnect-service[Cisco-IOS-XR-l2vpn-cfg:name = ' + str(self.name) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.name is not None:
                    return True
                if self.vlan_unaware_fxc_attachment_circuits is not None and self.vlan_unaware_fxc_attachment_circuits._has_data():
                    return True
                if self.vlan_unaware_fxc_pseudowire_evpns is not None and self.vlan_unaware_fxc_pseudowire_evpns._has_data():
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info']

        @property
        def _common_path(self):
            # Fixed absolute path; no list keys at this level.
            return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:flexible-xconnect-service-table/Cisco-IOS-XR-l2vpn-cfg:vlan-unaware-flexible-xconnect-services'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.vlan_unaware_flexible_xconnect_service is not None:
                for child_ref in self.vlan_unaware_flexible_xconnect_service:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:flexible-xconnect-service-table'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.vlan_unaware_flexible_xconnect_services is not None and self.vlan_unaware_flexible_xconnect_services._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.FlexibleXconnectServiceTable']['meta_info']
class Redundancy(object):
    """
    Redundancy groups
    .. attribute:: enable
    Enable redundancy groups
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: iccp_redundancy_groups
    List of Inter\-Chassis Communication Protocol redundancy groups
    **type**\: :py:class:`IccpRedundancyGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.Redundancy.IccpRedundancyGroups>`
    """

    # YANG module prefix / revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.enable = None
        # Child container is always instantiated; presence via _has_data().
        self.iccp_redundancy_groups = L2Vpn.Database.Redundancy.IccpRedundancyGroups()
        self.iccp_redundancy_groups.parent = self

    class IccpRedundancyGroups(object):
        """
        List of Inter\-Chassis Communication Protocol
        redundancy groups
        .. attribute:: iccp_redundancy_group
        ICCP Redundancy group
        **type**\: list of :py:class:`IccpRedundancyGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.iccp_redundancy_group = YList()
            self.iccp_redundancy_group.parent = self
            self.iccp_redundancy_group.name = 'iccp_redundancy_group'

        class IccpRedundancyGroup(object):
            """
            ICCP Redundancy group
            .. attribute:: group_id <key>
            Group ID
            **type**\: int
            **range:** \-2147483648..2147483647
            .. attribute:: iccp_interfaces
            List of interfaces
            **type**\: :py:class:`IccpInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces>`
            .. attribute:: multi_homing_node_id
            ICCP\-based service multi\-homing node ID
            **type**\: int
            **range:** 0..254
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.group_id = None  # list key
                self.iccp_interfaces = L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces()
                self.iccp_interfaces.parent = self
                self.multi_homing_node_id = None

            class IccpInterfaces(object):
                """
                List of interfaces
                .. attribute:: iccp_interface
                Interface name
                **type**\: list of :py:class:`IccpInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.iccp_interface = YList()
                    self.iccp_interface.parent = self
                    self.iccp_interface.name = 'iccp_interface'

                class IccpInterface(object):
                    """
                    Interface name
                    .. attribute:: interface_name <key>
                    Interface name
                    **type**\: str
                    **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
                    .. attribute:: mac_flush_tcn
                    Enable STP\-TCN MAC flushing
                    **type**\: :py:class:`Empty<ydk.types.Empty>`
                    .. attribute:: primary_vlan_range
                    Primary VLAN range, in the form of 1\-3,5 ,8\-11
                    **type**\: str
                    .. attribute:: recovery_delay
                    Failure clear recovery delay
                    **type**\: int
                    **range:** 30..3600
                    **default value**\: 180
                    .. attribute:: secondary_vlan_range
                    Secondary VLAN range, in the form of 1\-3,5 ,8\-11
                    **type**\: str
                    """

                    _prefix = 'l2vpn-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.interface_name = None  # list key
                        self.mac_flush_tcn = None
                        self.primary_vlan_range = None
                        self.recovery_delay = None
                        self.secondary_vlan_range = None

                    @property
                    def _common_path(self):
                        # Keyed entry: needs both parent path and interface_name key.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.interface_name is None:
                            raise YPYModelError('Key property interface_name is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:iccp-interface[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        # True when the key or any leaf is set.
                        if not self.is_config():
                            return False
                        if self.interface_name is not None:
                            return True
                        if self.mac_flush_tcn is not None:
                            return True
                        if self.primary_vlan_range is not None:
                            return True
                        if self.recovery_delay is not None:
                            return True
                        if self.secondary_vlan_range is not None:
                            return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                        return meta._meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:iccp-interfaces'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    # True when any list entry holds data.
                    if not self.is_config():
                        return False
                    if self.iccp_interface is not None:
                        for child_ref in self.iccp_interface:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces']['meta_info']

            @property
            def _common_path(self):
                # Absolute path embedding the group_id list key.
                if self.group_id is None:
                    raise YPYModelError('Key property group_id is None')
                return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:redundancy/Cisco-IOS-XR-l2vpn-cfg:iccp-redundancy-groups/Cisco-IOS-XR-l2vpn-cfg:iccp-redundancy-group[Cisco-IOS-XR-l2vpn-cfg:group-id = ' + str(self.group_id) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.group_id is not None:
                    return True
                if self.iccp_interfaces is not None and self.iccp_interfaces._has_data():
                    return True
                if self.multi_homing_node_id is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup']['meta_info']

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:redundancy/Cisco-IOS-XR-l2vpn-cfg:iccp-redundancy-groups'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.iccp_redundancy_group is not None:
                for child_ref in self.iccp_redundancy_group:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database/Cisco-IOS-XR-l2vpn-cfg:redundancy'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.enable is not None:
            return True
        if self.iccp_redundancy_groups is not None and self.iccp_redundancy_groups._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Database.Redundancy']['meta_info']
@property
def _common_path(self):
    # Fixed absolute path of the l2vpn database container.
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:database'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Configuration model: always True.
    return True
def _has_data(self):
    # True when any child container of the database reports data.
    if not self.is_config():
        return False
    if self.bridge_domain_groups is not None and self.bridge_domain_groups._has_data():
        return True
    if self.flexible_xconnect_service_table is not None and self.flexible_xconnect_service_table._has_data():
        return True
    if self.g8032_rings is not None and self.g8032_rings._has_data():
        return True
    if self.pseudowire_classes is not None and self.pseudowire_classes._has_data():
        return True
    if self.redundancy is not None and self.redundancy._has_data():
        return True
    if self.xconnect_groups is not None and self.xconnect_groups._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn.Database']['meta_info']
class Pbb(object):
    """L2VPN PBB (provider backbone bridging) global configuration.

    Single optional leaf:

    .. attribute:: backbone_source_mac
    Backbone Source MAC
    **type**\: str
    **pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
    """

    # YANG module prefix / revision this binding corresponds to.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.backbone_source_mac = None  # None means the leaf is unset

    @property
    def _common_path(self):
        # Top-level container: the path is a fixed constant, no parent lookup.
        return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:pbb'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Data is present only when the single leaf has been assigned.
        return self.is_config() and self.backbone_source_mac is not None

    @staticmethod
    def _meta_info():
        # Import deferred so the heavy meta module loads only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Pbb']['meta_info']
class AutoDiscovery(object):
    """
    Global auto\-discovery attributes
    .. attribute:: bgp_signaling
    Global bgp signaling attributes
    **type**\: :py:class:`BgpSignaling <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.AutoDiscovery.BgpSignaling>`
    """

    # YANG module prefix / revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated; presence via _has_data().
        self.bgp_signaling = L2Vpn.AutoDiscovery.BgpSignaling()
        self.bgp_signaling.parent = self

    class BgpSignaling(object):
        """
        Global bgp signaling attributes
        .. attribute:: mtu_mismatch_ignore
        Ignore MTU mismatch for auto\-discovered pseudowires
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.mtu_mismatch_ignore = None

        @property
        def _common_path(self):
            # Fixed absolute path (no keyed ancestors).
            return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:auto-discovery/Cisco-IOS-XR-l2vpn-cfg:bgp-signaling'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.mtu_mismatch_ignore is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.AutoDiscovery.BgpSignaling']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:auto-discovery'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.bgp_signaling is not None and self.bgp_signaling._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.AutoDiscovery']['meta_info']
class Utility(object):
    """
    L2VPN utilities
    .. attribute:: logging
    L2VPN logging utility
    **type**\: :py:class:`Logging <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Utility.Logging>`
    """

    # YANG module prefix / revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated; presence via _has_data().
        self.logging = L2Vpn.Utility.Logging()
        self.logging.parent = self

    class Logging(object):
        """
        L2VPN logging utility
        .. attribute:: bridge_domain_state_change
        Enable Bridge Domain state change logging
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: nsr_state_change
        Enable Non Stop Routing state change logging
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: pseudowire_state_change
        Enable pseudowire state change logging
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: pwhe_replication_state_change
        Enable PW\-HE Replication state change logging
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: vfi
        Enable VFI state change logging
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            # Empty-type leaves: None means unset.
            self.bridge_domain_state_change = None
            self.nsr_state_change = None
            self.pseudowire_state_change = None
            self.pwhe_replication_state_change = None
            self.vfi = None

        @property
        def _common_path(self):
            # Fixed absolute path (no keyed ancestors).
            return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:utility/Cisco-IOS-XR-l2vpn-cfg:logging'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when any logging leaf has been set.
            if not self.is_config():
                return False
            if self.bridge_domain_state_change is not None:
                return True
            if self.nsr_state_change is not None:
                return True
            if self.pseudowire_state_change is not None:
                return True
            if self.pwhe_replication_state_change is not None:
                return True
            if self.vfi is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Utility.Logging']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:utility'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.logging is not None and self.logging._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Utility']['meta_info']
class Snmp(object):
"""
SNMP related configuration
.. attribute:: mib
MIB related configuration
**type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Snmp.Mib>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Child MIB container is always instantiated; presence via _has_data().
    self.parent = None
    self.mib = L2Vpn.Snmp.Mib()
    self.mib.parent = self
class Mib(object):
"""
MIB related configuration
.. attribute:: mib_interface
Interface related configuration for MIB
**type**\: :py:class:`MibInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Snmp.Mib.MibInterface>`
.. attribute:: mib_pseudowire
Pseudowire related configuration for MIB
**type**\: :py:class:`MibPseudowire <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Snmp.Mib.MibPseudowire>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    # Child containers are always instantiated; presence via _has_data().
    self.parent = None
    self.mib_interface = L2Vpn.Snmp.Mib.MibInterface()
    self.mib_interface.parent = self
    self.mib_pseudowire = L2Vpn.Snmp.Mib.MibPseudowire()
    self.mib_pseudowire.parent = self
class MibInterface(object):
    """
    Interface related configuration for MIB
    .. attribute:: format
    MIB interface name output format
    **type**\: :py:class:`Format <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.L2Vpn.Snmp.Mib.MibInterface.Format>`
    """

    # YANG module prefix / revision this binding was generated from.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Child container is always instantiated; presence via _has_data().
        self.format = L2Vpn.Snmp.Mib.MibInterface.Format()
        self.format.parent = self

    class Format(object):
        """
        MIB interface name output format
        .. attribute:: external_interface_format
        Set MIB interface name output in slash format (/)
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.external_interface_format = None

        @property
        def _common_path(self):
            # Fixed absolute path (no keyed ancestors).
            return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:snmp/Cisco-IOS-XR-l2vpn-cfg:mib/Cisco-IOS-XR-l2vpn-cfg:mib-interface/Cisco-IOS-XR-l2vpn-cfg:format'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.external_interface_format is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['L2Vpn.Snmp.Mib.MibInterface.Format']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:snmp/Cisco-IOS-XR-l2vpn-cfg:mib/Cisco-IOS-XR-l2vpn-cfg:mib-interface'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.format is not None and self.format._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['L2Vpn.Snmp.Mib.MibInterface']['meta_info']
class MibPseudowire(object):
"""
Pseudowire related configuration for MIB
.. attribute:: statistics
Enable pseudowire statistics in MIB output
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.statistics = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:snmp/Cisco-IOS-XR-l2vpn-cfg:mib/Cisco-IOS-XR-l2vpn-cfg:mib-pseudowire'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.statistics is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Snmp.Mib.MibPseudowire']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:snmp/Cisco-IOS-XR-l2vpn-cfg:mib'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.mib_interface is not None and self.mib_interface._has_data():
return True
if self.mib_pseudowire is not None and self.mib_pseudowire._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Snmp.Mib']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn/Cisco-IOS-XR-l2vpn-cfg:snmp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.mib is not None and self.mib._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
return meta._meta_table['L2Vpn.Snmp']['meta_info']
@property
def _common_path(self):
    # Absolute XPath of the top-level l2vpn container (no parent needed).
    return '/Cisco-IOS-XR-l2vpn-cfg:l2vpn'
def is_config(self):
    """Return ``True``: this object models configuration (not operational) data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.auto_discovery is not None and self.auto_discovery._has_data():
return True
if self.capability is not None:
return True
if self.database is not None and self.database._has_data():
return True
if self.enable is not None:
return True
if self.l2vpn_router_id is not None:
return True
if self.load_balance is not None:
return True
if self.mspw_description is not None:
return True
if self.mtu_mismatch_ignore is not None:
return True
if self.neighbor is not None and self.neighbor._has_data():
return True
if self.nsr is not None:
return True
if self.pbb is not None and self.pbb._has_data():
return True
if self.pw_grouping is not None:
return True
if self.pw_routing is not None and self.pw_routing._has_data():
return True
if self.pw_status_disable is not None:
return True
if self.pwoam_refresh is not None:
return True
if self.snmp is not None and self.snmp._has_data():
return True
if self.tcn_propagation is not None:
return True
if self.utility is not None and self.utility._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Looks up this class's entry in the generated meta table; the import
    # is local so the large meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['L2Vpn']['meta_info']
class GenericInterfaceLists(object):
    """
    generic interface lists
    .. attribute:: generic_interface
    Bridge group
    **type**\: list of :py:class:`GenericInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.GenericInterfaceLists.GenericInterface>`
    """

    # YANG module prefix/revision this generated class corresponds to.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the top-level list container (no parent back-reference)."""
        # YList of GenericInterface entries, keyed by generic_interface_list_name.
        self.generic_interface = YList()
        self.generic_interface.parent = self
        self.generic_interface.name = 'generic_interface'

    class GenericInterface(object):
        """
        Bridge group
        .. attribute:: generic_interface_list_name <key>
        Name of the interface list
        **type**\: str
        **length:** 0..32
        .. attribute:: enable
        Enable interface list
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: interfaces
        Interface table
        **type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.GenericInterfaceLists.GenericInterface.Interfaces>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            """Build a list entry; key leaf unset until assigned by the caller."""
            self.parent = None
            # List key: must be set before _common_path can be derived.
            self.generic_interface_list_name = None
            self.enable = None
            self.interfaces = GenericInterfaceLists.GenericInterface.Interfaces()
            self.interfaces.parent = self

        class Interfaces(object):
            """
            Interface table
            .. attribute:: interface
            Interface
            **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.GenericInterfaceLists.GenericInterface.Interfaces.Interface>`
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                """Build the interface table container."""
                self.parent = None
                self.interface = YList()
                self.interface.parent = self
                self.interface.name = 'interface'

            class Interface(object):
                """
                Interface
                .. attribute:: interface_name <key>
                Name of the interface
                **type**\: str
                **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
                .. attribute:: enable
                Enable interface
                **type**\: :py:class:`Empty<ydk.types.Empty>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    """Build a list entry; key leaf unset until assigned by the caller."""
                    self.parent = None
                    self.interface_name = None
                    self.enable = None

                @property
                def _common_path(self):
                    # Path is relative to the parent container and requires
                    # both the parent link and the key leaf to be set.
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.interface_name is None:
                        raise YPYModelError('Key property interface_name is None')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:interface[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    # Any set leaf (including the key) counts as data.
                    if not self.is_config():
                        return False
                    if self.interface_name is not None:
                        return True
                    if self.enable is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['GenericInterfaceLists.GenericInterface.Interfaces.Interface']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:interfaces'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # True when any list entry carries data.
                if not self.is_config():
                    return False
                if self.interface is not None:
                    for child_ref in self.interface:
                        if child_ref._has_data():
                            return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['GenericInterfaceLists.GenericInterface.Interfaces']['meta_info']

        @property
        def _common_path(self):
            # Top-level list entry: path is absolute, keyed by the list name.
            if self.generic_interface_list_name is None:
                raise YPYModelError('Key property generic_interface_list_name is None')
            return '/Cisco-IOS-XR-l2vpn-cfg:generic-interface-lists/Cisco-IOS-XR-l2vpn-cfg:generic-interface[Cisco-IOS-XR-l2vpn-cfg:generic-interface-list-name = ' + str(self.generic_interface_list_name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.generic_interface_list_name is not None:
                return True
            if self.enable is not None:
                return True
            if self.interfaces is not None and self.interfaces._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['GenericInterfaceLists.GenericInterface']['meta_info']

    @property
    def _common_path(self):
        # Absolute XPath of the top-level generic-interface-lists container.
        return '/Cisco-IOS-XR-l2vpn-cfg:generic-interface-lists'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.generic_interface is not None:
            for child_ref in self.generic_interface:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['GenericInterfaceLists']['meta_info']
class Evpn(object):
"""
evpn
.. attribute:: enable
Enable EVPN feature
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: evpn_tables
EVPN submodes
**type**\: :py:class:`EvpnTables <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Build the top-level evpn container (no parent back-reference kept)."""
    # Presence leaf: set to Empty to enable the EVPN feature.
    self.enable = None
    self.evpn_tables = Evpn.EvpnTables()
    self.evpn_tables.parent = self
class EvpnTables(object):
"""
EVPN submodes
.. attribute:: evpn_interfaces
Attachment Circuit interfaces
**type**\: :py:class:`EvpnInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.EvpnInterfaces>`
.. attribute:: evpn_load_balancing
Enter EVPN Loadbalancing configuration submode
**type**\: :py:class:`EvpnLoadBalancing <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.EvpnLoadBalancing>`
.. attribute:: evpn_timers
Enter EVPN timers configuration submode
**type**\: :py:class:`EvpnTimers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.EvpnTimers>`
.. attribute:: evpnbgp_auto_discovery
Enable Autodiscovery BGP in EVPN
**type**\: :py:class:`EvpnbgpAutoDiscovery <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.EvpnbgpAutoDiscovery>`
.. attribute:: evpnevis
Enter EVPN EVI configuration submode
**type**\: :py:class:`Evpnevis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis>`
"""
_prefix = 'l2vpn-cfg'
_revision = '2015-11-09'
def __init__(self):
    """Build the evpn-tables container and link all submode children."""
    self.parent = None
    # Each child container keeps a parent back-reference so it can derive
    # its XPath from this container's _common_path.
    self.evpn_interfaces = Evpn.EvpnTables.EvpnInterfaces()
    self.evpn_interfaces.parent = self
    self.evpn_load_balancing = Evpn.EvpnTables.EvpnLoadBalancing()
    self.evpn_load_balancing.parent = self
    self.evpn_timers = Evpn.EvpnTables.EvpnTimers()
    self.evpn_timers.parent = self
    self.evpnbgp_auto_discovery = Evpn.EvpnTables.EvpnbgpAutoDiscovery()
    self.evpnbgp_auto_discovery.parent = self
    self.evpnevis = Evpn.EvpnTables.Evpnevis()
    self.evpnevis.parent = self
class EvpnTimers(object):
    """
    Enter EVPN timers configuration submode
    .. attribute:: enable
    Enable EVPN timers
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: evpn_peering
    Global Peering timer
    **type**\: int
    **range:** 0..300
    **default value**\: 3
    .. attribute:: evpn_recovery
    Global Recovery timer
    **type**\: int
    **range:** 20..3600
    **default value**\: 30
    """

    # YANG module prefix/revision this generated class corresponds to.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        """Initialize the evpn-timers container; all leaves unset by default."""
        self.parent = None
        self.enable = None
        self.evpn_peering = None
        self.evpn_recovery = None

    @property
    def _common_path(self):
        # Absolute XPath of the evpn-timers container.
        return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpn-timers'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf of this container has been set.
        if not self.is_config():
            return False
        if self.enable is not None:
            return True
        if self.evpn_peering is not None:
            return True
        if self.evpn_recovery is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['Evpn.EvpnTables.EvpnTimers']['meta_info']
class Evpnevis(object):
    """
    Enter EVPN EVI configuration submode
    .. attribute:: evpnevi
    Enter EVPN EVI configuration submode
    **type**\: list of :py:class:`Evpnevi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi>`
    """

    # YANG module prefix/revision this generated class corresponds to.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the evpnevis container holding the list of EVI entries."""
        self.parent = None
        # YList of Evpnevi entries, keyed by ``eviid``.
        self.evpnevi = YList()
        self.evpnevi.parent = self
        self.evpnevi.name = 'evpnevi'

    class Evpnevi(object):
        """
        Enter EVPN EVI configuration submode
        .. attribute:: eviid <key>
        EVI ID
        **type**\: int
        **range:** 1..65534
        .. attribute:: evi_load_balancing
        Enter EVI Loadbalancing configuration submode
        **type**\: :py:class:`EviLoadBalancing <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing>`
        .. attribute:: evpn_evi_cw_disable
        CW disable for EVPN EVI
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: evpnevi_description
        Description for EVPN EVI
        **type**\: str
        **length:** 0..64
        .. attribute:: evpnevibgp_auto_discovery
        Enable Autodiscovery BGP in EVPN EVI
        **type**\: :py:class:`EvpnevibgpAutoDiscovery <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            """Build an EVI list entry; key leaf unset until assigned by the caller."""
            self.parent = None
            # List key: must be set before _common_path can be derived.
            self.eviid = None
            self.evi_load_balancing = Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing()
            self.evi_load_balancing.parent = self
            self.evpn_evi_cw_disable = None
            self.evpnevi_description = None
            self.evpnevibgp_auto_discovery = Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery()
            self.evpnevibgp_auto_discovery.parent = self

        class EviLoadBalancing(object):
            """
            Enter EVI Loadbalancing configuration submode
            .. attribute:: enable
            Enable EVI Loadbalancing
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: evi_flow_label
            Enable Flow Label based load balancing
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                """Initialize the evi-load-balancing container; leaves unset by default."""
                self.parent = None
                self.enable = None
                self.evi_flow_label = None

            @property
            def _common_path(self):
                # Relative path: requires the parent EVI entry to be linked.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:evi-load-balancing'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.enable is not None:
                    return True
                if self.evi_flow_label is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing']['meta_info']

        class EvpnevibgpAutoDiscovery(object):
            """
            Enable Autodiscovery BGP in EVPN EVI
            .. attribute:: enable
            Enable Autodiscovery BGP
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: evpn_route_distinguisher
            Route Distinguisher
            **type**\: :py:class:`EvpnRouteDistinguisher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher>`
            .. attribute:: evpn_route_targets
            Route Target
            **type**\: :py:class:`EvpnRouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets>`
            .. attribute:: table_policy
            Table Policy for installation of forwarding data to L2FIB
            **type**\: str
            """

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                """Build the per-EVI BGP auto-discovery container."""
                self.parent = None
                self.enable = None
                self.evpn_route_distinguisher = Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher()
                self.evpn_route_distinguisher.parent = self
                self.evpn_route_targets = Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets()
                self.evpn_route_targets.parent = self
                self.table_policy = None

            class EvpnRouteTargets(object):
                """
                Route Target
                .. attribute:: evpn_route_target
                Name of the Route Target
                **type**\: list of :py:class:`EvpnRouteTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    """Build the route-target table container."""
                    self.parent = None
                    self.evpn_route_target = YList()
                    self.evpn_route_target.parent = self
                    self.evpn_route_target.name = 'evpn_route_target'

                class EvpnRouteTarget(object):
                    """
                    Name of the Route Target
                    .. attribute:: format <key>
                    Format of the route target
                    **type**\: :py:class:`BgpRouteTargetFormatEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetFormatEnum>`
                    .. attribute:: role <key>
                    Role of the router target type
                    **type**\: :py:class:`BgpRouteTargetRoleEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetRoleEnum>`
                    .. attribute:: stitching <key>
                    whether RT is Stitching RT
                    **type**\: :py:class:`BgpRouteTargetEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteTargetEnum>`
                    .. attribute:: ipv4_address
                    ipv4 address
                    **type**\: list of :py:class:`Ipv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address>`
                    .. attribute:: two_byte_as_or_four_byte_as
                    two byte as or four byte as
                    **type**\: list of :py:class:`TwoByteAsOrFourByteAs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs>`
                    """

                    _prefix = 'l2vpn-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Build a route-target entry; the three key leaves start unset."""
                        self.parent = None
                        # Composite key: format + role + stitching.
                        self.format = None
                        self.role = None
                        self.stitching = None
                        self.ipv4_address = YList()
                        self.ipv4_address.parent = self
                        self.ipv4_address.name = 'ipv4_address'
                        self.two_byte_as_or_four_byte_as = YList()
                        self.two_byte_as_or_four_byte_as.parent = self
                        self.two_byte_as_or_four_byte_as.name = 'two_byte_as_or_four_byte_as'

                    class TwoByteAsOrFourByteAs(object):
                        """
                        two byte as or four byte as
                        .. attribute:: as_ <key>
                        Two byte or 4 byte AS number
                        **type**\: int
                        **range:** 1..4294967295
                        .. attribute:: as_index <key>
                        AS\:nn (hex or decimal format)
                        **type**\: int
                        **range:** 0..4294967295
                        """

                        _prefix = 'l2vpn-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            """Build an AS-number route-target entry; keys start unset."""
                            self.parent = None
                            # ``as_`` carries a trailing underscore to avoid the
                            # Python keyword ``as``.
                            self.as_ = None
                            self.as_index = None

                        @property
                        def _common_path(self):
                            # Relative path keyed by both AS leaves.
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')
                            if self.as_ is None:
                                raise YPYModelError('Key property as_ is None')
                            if self.as_index is None:
                                raise YPYModelError('Key property as_index is None')
                            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:two-byte-as-or-four-byte-as[Cisco-IOS-XR-l2vpn-cfg:as = ' + str(self.as_) + '][Cisco-IOS-XR-l2vpn-cfg:as-index = ' + str(self.as_index) + ']'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.as_ is not None:
                                return True
                            if self.as_index is not None:
                                return True
                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                            return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs']['meta_info']

                    class Ipv4Address(object):
                        """
                        ipv4 address
                        .. attribute:: addr_index <key>
                        Addr index
                        **type**\: int
                        **range:** 0..65535
                        .. attribute:: address <key>
                        IPV4 address
                        **type**\: str
                        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                        """

                        _prefix = 'l2vpn-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            """Build an IPv4 route-target entry; keys start unset."""
                            self.parent = None
                            self.addr_index = None
                            self.address = None

                        @property
                        def _common_path(self):
                            # Relative path keyed by addr-index and address.
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')
                            if self.addr_index is None:
                                raise YPYModelError('Key property addr_index is None')
                            if self.address is None:
                                raise YPYModelError('Key property address is None')
                            return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:ipv4-address[Cisco-IOS-XR-l2vpn-cfg:addr-index = ' + str(self.addr_index) + '][Cisco-IOS-XR-l2vpn-cfg:address = ' + str(self.address) + ']'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.addr_index is not None:
                                return True
                            if self.address is not None:
                                return True
                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                            return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address']['meta_info']

                    @property
                    def _common_path(self):
                        # Relative path keyed by the three-part composite key.
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        if self.format is None:
                            raise YPYModelError('Key property format is None')
                        if self.role is None:
                            raise YPYModelError('Key property role is None')
                        if self.stitching is None:
                            raise YPYModelError('Key property stitching is None')
                        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:evpn-route-target[Cisco-IOS-XR-l2vpn-cfg:format = ' + str(self.format) + '][Cisco-IOS-XR-l2vpn-cfg:role = ' + str(self.role) + '][Cisco-IOS-XR-l2vpn-cfg:stitching = ' + str(self.stitching) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        # Set keys count as data; otherwise scan both child lists.
                        if not self.is_config():
                            return False
                        if self.format is not None:
                            return True
                        if self.role is not None:
                            return True
                        if self.stitching is not None:
                            return True
                        if self.ipv4_address is not None:
                            for child_ref in self.ipv4_address:
                                if child_ref._has_data():
                                    return True
                        if self.two_byte_as_or_four_byte_as is not None:
                            for child_ref in self.two_byte_as_or_four_byte_as:
                                if child_ref._has_data():
                                    return True
                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                        return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:evpn-route-targets'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.evpn_route_target is not None:
                        for child_ref in self.evpn_route_target:
                            if child_ref._has_data():
                                return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets']['meta_info']

            class EvpnRouteDistinguisher(object):
                """
                Route Distinguisher
                .. attribute:: addr_index
                Addr index
                **type**\: int
                **range:** 0..65535
                .. attribute:: address
                IPV4 address
                **type**\: str
                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                .. attribute:: as_
                Two byte or 4 byte AS number
                **type**\: int
                **range:** 1..4294967295
                .. attribute:: as_index
                AS\:nn (hex or decimal format)
                **type**\: int
                **range:** 0..4294967295
                .. attribute:: type
                Router Distinguisher Type
                **type**\: :py:class:`BgpRouteDistinguisherEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteDistinguisherEnum>`
                """

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    """Initialize the route-distinguisher container; leaves unset."""
                    self.parent = None
                    self.addr_index = None
                    self.address = None
                    self.as_ = None
                    self.as_index = None
                    self.type = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:evpn-route-distinguisher'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.addr_index is not None:
                        return True
                    if self.address is not None:
                        return True
                    if self.as_ is not None:
                        return True
                    if self.as_index is not None:
                        return True
                    if self.type is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:evpnevibgp-auto-discovery'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.enable is not None:
                    return True
                if self.evpn_route_distinguisher is not None and self.evpn_route_distinguisher._has_data():
                    return True
                if self.evpn_route_targets is not None and self.evpn_route_targets._has_data():
                    return True
                if self.table_policy is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info']

        @property
        def _common_path(self):
            # Absolute path keyed by the EVI ID.
            if self.eviid is None:
                raise YPYModelError('Key property eviid is None')
            return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpnevis/Cisco-IOS-XR-l2vpn-cfg:evpnevi[Cisco-IOS-XR-l2vpn-cfg:eviid = ' + str(self.eviid) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.eviid is not None:
                return True
            if self.evi_load_balancing is not None and self.evi_load_balancing._has_data():
                return True
            if self.evpn_evi_cw_disable is not None:
                return True
            if self.evpnevi_description is not None:
                return True
            if self.evpnevibgp_auto_discovery is not None and self.evpnevibgp_auto_discovery._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info']

    @property
    def _common_path(self):
        # Absolute XPath of the evpnevis container.
        return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpnevis'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.evpnevi is not None:
            for child_ref in self.evpnevi:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['Evpn.EvpnTables.Evpnevis']['meta_info']
class EvpnLoadBalancing(object):
    """
    Enter EVPN Loadbalancing configuration submode
    .. attribute:: enable
    Enable EVPN Loadbalancing
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: evpn_flow_label
    Enable Flow Label based load balancing
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    """

    # YANG module prefix/revision this generated class corresponds to.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        """Initialize the evpn-load-balancing container; leaves unset by default."""
        self.parent = None
        self.enable = None
        self.evpn_flow_label = None

    @property
    def _common_path(self):
        # Absolute XPath of the evpn-load-balancing container.
        return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpn-load-balancing'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf of this container has been set.
        if not self.is_config():
            return False
        if self.enable is not None:
            return True
        if self.evpn_flow_label is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['Evpn.EvpnTables.EvpnLoadBalancing']['meta_info']
class EvpnbgpAutoDiscovery(object):
    """
    Enable Autodiscovery BGP in EVPN
    .. attribute:: enable
    Enable Autodiscovery BGP
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: evpn_route_distinguisher
    Route Distinguisher
    **type**\: :py:class:`EvpnRouteDistinguisher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher>`
    """

    # YANG module prefix/revision this generated class corresponds to.
    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the global BGP auto-discovery container."""
        self.parent = None
        self.enable = None
        self.evpn_route_distinguisher = Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher()
        self.evpn_route_distinguisher.parent = self

    class EvpnRouteDistinguisher(object):
        """
        Route Distinguisher
        .. attribute:: addr_index
        Addr index
        **type**\: int
        **range:** 0..65535
        .. attribute:: address
        IPV4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: as_
        Two byte or 4 byte AS number
        **type**\: int
        **range:** 1..4294967295
        .. attribute:: as_index
        AS\:nn (hex or decimal format)
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: type
        Router Distinguisher Type
        **type**\: :py:class:`BgpRouteDistinguisherEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg.BgpRouteDistinguisherEnum>`
        """

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            """Initialize the route-distinguisher container; leaves unset."""
            self.parent = None
            self.addr_index = None
            self.address = None
            # ``as_`` carries a trailing underscore to avoid the keyword ``as``.
            self.as_ = None
            self.as_index = None
            self.type = None

        @property
        def _common_path(self):
            # Absolute XPath of this container (path is fixed, no parent needed).
            return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpnbgp-auto-discovery/Cisco-IOS-XR-l2vpn-cfg:evpn-route-distinguisher'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when any leaf of this container has been set.
            if not self.is_config():
                return False
            if self.addr_index is not None:
                return True
            if self.address is not None:
                return True
            if self.as_ is not None:
                return True
            if self.as_index is not None:
                return True
            if self.type is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpnbgp-auto-discovery'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.enable is not None:
            return True
        if self.evpn_route_distinguisher is not None and self.evpn_route_distinguisher._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery']['meta_info']
class EvpnInterfaces(object):
    """Attachment Circuit interfaces.

    Container holding the keyed list of per-interface EVPN configuration
    entries (``evpn_interface``).
    """

    _prefix = 'l2vpn-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # Keyed list of EvpnInterface entries; YList records parent/name so
        # each child can derive its XML path from this container.
        self.evpn_interface = YList()
        self.evpn_interface.parent = self
        self.evpn_interface.name = 'evpn_interface'

    class EvpnInterface(object):
        """One attachment-circuit interface, keyed by ``interface_name``."""

        _prefix = 'l2vpn-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.interface_name = None
            self.ethernet_segment = Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment()
            self.ethernet_segment.parent = self
            self.evpnac_timers = Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers()
            self.evpnac_timers.parent = self
            self.mac_flush = None

        class EvpnacTimers(object):
            """Interface-specific EVPN timers (peering / recovery)."""

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.enable = None
                self.evpnac_peering = None   # int, range 0..300, default 3
                self.evpnac_recovery = None  # int, range 20..3600, default 30

            @property
            def _common_path(self):
                # Path is relative to the owning interface entry.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:evpnac-timers'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                """Return True when any leaf of this node is configured."""
                if not self.is_config():
                    return False
                leaves = (self.enable, self.evpnac_peering, self.evpnac_recovery)
                return any(leaf is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers']['meta_info']

        class EthernetSegment(object):
            """Ethernet Segment configuration for this interface."""

            _prefix = 'l2vpn-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.backbone_source_mac = None      # str, MAC pattern
                self.enable = None
                self.es_import_route_target = None   # str, MAC pattern
                self.force_single_homed = None
                self.identifier_type0 = Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0()
                self.identifier_type0.parent = self
                self.load_balancing_per_service = None
                self.manual_service_carving = Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving()
                self.manual_service_carving.parent = self

            class IdentifierType0(object):
                """Type-0 Ethernet segment identifier, split into hex byte groups."""

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.bytes1 = None
                    self.bytes23 = None
                    self.bytes45 = None
                    self.bytes67 = None
                    self.bytes89 = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:identifier-type0'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    """Return True when any byte-group leaf is configured."""
                    if not self.is_config():
                        return False
                    leaves = (self.bytes1, self.bytes23, self.bytes45,
                              self.bytes67, self.bytes89)
                    return any(leaf is not None for leaf in leaves)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0']['meta_info']

            class ManualServiceCarving(object):
                """Manual service carving configuration."""

                _prefix = 'l2vpn-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.enable = None
                    self.service_list = Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList()
                    self.service_list.parent = self

                class ServiceList(object):
                    """Primary/secondary service lists for manual carving."""

                    _prefix = 'l2vpn-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.primary = None    # str, length 0..150
                        self.secondary = None  # str, length 0..150

                    @property
                    def _common_path(self):
                        if self.parent is None:
                            raise YPYModelError('parent is not set . Cannot derive path.')
                        return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:service-list'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        """Return True when either service list is configured."""
                        if not self.is_config():
                            return False
                        return self.primary is not None or self.secondary is not None

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                        return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:manual-service-carving'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    """Return True when the enable leaf or the service-list child carries data."""
                    if not self.is_config():
                        return False
                    if self.enable is not None:
                        return True
                    return self.service_list is not None and self.service_list._has_data()

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                    return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-l2vpn-cfg:ethernet-segment'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                """Return True when any leaf or child container carries data."""
                if not self.is_config():
                    return False
                leaves = (self.backbone_source_mac, self.enable,
                          self.es_import_route_target, self.force_single_homed,
                          self.load_balancing_per_service)
                if any(leaf is not None for leaf in leaves):
                    return True
                for child in (self.identifier_type0, self.manual_service_carving):
                    if child is not None and child._has_data():
                        return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
                return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info']

        @property
        def _common_path(self):
            # List entries embed the key leaf in their XML path, so the key must be set.
            if self.interface_name is None:
                raise YPYModelError('Key property interface_name is None')
            return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpn-interfaces/Cisco-IOS-XR-l2vpn-cfg:evpn-interface[Cisco-IOS-XR-l2vpn-cfg:interface-name = ' + str(self.interface_name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            """Return True when the key, a leaf, or a child container carries data."""
            if not self.is_config():
                return False
            if self.interface_name is not None or self.mac_flush is not None:
                return True
            for child in (self.ethernet_segment, self.evpnac_timers):
                if child is not None and child._has_data():
                    return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
            return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables/Cisco-IOS-XR-l2vpn-cfg:evpn-interfaces'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        """Return True when any list entry carries data."""
        if not self.is_config():
            return False
        if self.evpn_interface is None:
            return False
        return any(entry._has_data() for entry in self.evpn_interface)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
        return meta._meta_table['Evpn.EvpnTables.EvpnInterfaces']['meta_info']
@property
def _common_path(self):
    # Absolute XML path of this node in the Cisco-IOS-XR-l2vpn-cfg YANG model.
    return '/Cisco-IOS-XR-l2vpn-cfg:evpn/Cisco-IOS-XR-l2vpn-cfg:evpn-tables'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # This subtree of the generated model is configuration (cfg) data, so always True.
    return True
def _has_data(self):
    """Return True when any child table of this container carries data."""
    if not self.is_config():
        return False
    children = (
        self.evpn_interfaces,
        self.evpn_load_balancing,
        self.evpn_timers,
        self.evpnbgp_auto_discovery,
        self.evpnevis,
    )
    return any(child is not None and child._has_data() for child in children)
@staticmethod
def _meta_info():
    # Deferred import: the generated meta table is only needed when metadata is requested.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['Evpn.EvpnTables']['meta_info']
@property
def _common_path(self):
    # Root XML path of the EVPN configuration tree.
    return '/Cisco-IOS-XR-l2vpn-cfg:evpn'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # This subtree of the generated model is configuration (cfg) data, so always True.
    return True
def _has_data(self):
    """Return True when the enable leaf or the tables child carries data."""
    if not self.is_config():
        return False
    if self.enable is not None:
        return True
    tables = self.evpn_tables
    return tables is not None and tables._has_data()
@staticmethod
def _meta_info():
    # Deferred import: the generated meta table is only needed when metadata is requested.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_l2vpn_cfg as meta
    return meta._meta_table['Evpn']['meta_info']
| 49.158053
| 344
| 0.397196
| 61,088
| 836,031
| 5.216049
| 0.014176
| 0.041225
| 0.051532
| 0.048346
| 0.87622
| 0.848053
| 0.818819
| 0.788999
| 0.761924
| 0.734545
| 0
| 0.02126
| 0.541522
| 836,031
| 17,006
| 345
| 49.160943
| 0.810038
| 0.19333
| 0
| 0.746917
| 0
| 0.016138
| 0.10856
| 0.074115
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161375
| false
| 0.000262
| 0.037785
| 0.00433
| 0.500787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
4a7efabebfe971ead78807db2b6b690d11b509cc
| 11,504
|
py
|
Python
|
nnunet/training/network_training/nnUNet_variants/profiling/nnUNetTrainerV2_2epochs.py
|
Gitsamshi/nnUNet-1
|
5341684211e6d91dab6ad76a7595a95addff23be
|
[
"Apache-2.0"
] | 1
|
2020-12-17T02:06:57.000Z
|
2020-12-17T02:06:57.000Z
|
nnunet/training/network_training/nnUNet_variants/profiling/nnUNetTrainerV2_2epochs.py
|
genhao3/nnUNet
|
756fb365f021cb78a6370c09a970d6f07df81022
|
[
"Apache-2.0"
] | null | null | null |
nnunet/training/network_training/nnUNet_variants/profiling/nnUNetTrainerV2_2epochs.py
|
genhao3/nnUNet
|
756fb365f021cb78a6370c09a970d6f07df81022
|
[
"Apache-2.0"
] | 1
|
2020-11-17T01:35:32.000Z
|
2020-11-17T01:35:32.000Z
|
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple
import numpy as np
import torch
from nnunet.training.network_training.nnUNetTrainerV2 import nnUNetTrainerV2
from nnunet.training.network_training.nnUNetTrainerV2_DDP import nnUNetTrainerV2_DDP
from nnunet.training.network_training.nnUNet_variants.architectural_variants.nnUNetTrainerV2_noDeepSupervision import \
nnUNetTrainerV2_noDeepSupervision
from nnunet.utilities.to_torch import maybe_to_torch, to_cuda
from torch import nn
from torch.nn.utils import clip_grad_norm_
# Optional dependency: NVIDIA apex provides the `amp` mixed-precision API.
# When apex is not installed, `amp` is None and the run_iteration methods
# below fall back to a plain (non-scaled) backward pass.
try:
    from apex import amp
except ImportError:
    amp = None
class nnUNetTrainerV2_2epochs(nnUNetTrainerV2):
    """nnUNetTrainerV2 variant capped at 2 epochs.

    Validation, softmax prediction and checkpoint saving are overridden as
    no-ops, so a run exercises the training loop only (used for profiling).
    """

    def __init__(self, plans_file, fold, output_folder=None, dataset_directory=None, batch_dice=True, stage=None,
                 unpack_data=True, deterministic=True, fp16=False):
        super().__init__(plans_file, fold, output_folder, dataset_directory, batch_dice, stage, unpack_data,
                         deterministic, fp16)
        # Cap training length; everything else is inherited unchanged.
        self.max_num_epochs = 2

    def validate(self, do_mirroring: bool = True, use_sliding_window: bool = True, step_size: float = 0.5,
                 save_softmax: bool = True, use_gaussian: bool = True, overwrite: bool = True,
                 validation_folder_name: str = 'validation_raw', debug: bool = False, all_in_gpu: bool = False,
                 segmentation_export_kwargs=None):
        """Disabled: this variant never runs validation."""
        pass

    def predict_preprocessed_data_return_seg_and_softmax(self, data: np.ndarray, do_mirroring: bool = True,
                                                         mirror_axes: Tuple[int] = None, use_sliding_window: bool = True,
                                                         step_size: float = 0.5, use_gaussian: bool = True,
                                                         pad_border_mode: str = 'constant', pad_kwargs: dict = None,
                                                         all_in_gpu: bool = True,
                                                         verbose: bool = True) -> Tuple[np.ndarray, np.ndarray]:
        """Disabled: this variant never runs inference."""
        pass

    def save_checkpoint(self, fname, save_optimizer=True):
        """Disabled: this variant never writes checkpoints."""
        pass
class nnUNetTrainerV2_5epochs(nnUNetTrainerV2):
    """nnUNetTrainerV2 variant capped at 5 epochs.

    Validation, softmax prediction and checkpoint saving are overridden as
    no-ops, so a run exercises the training loop only (used for profiling).
    """

    def __init__(self, plans_file, fold, output_folder=None, dataset_directory=None, batch_dice=True, stage=None,
                 unpack_data=True, deterministic=True, fp16=False):
        super().__init__(plans_file, fold, output_folder, dataset_directory, batch_dice, stage, unpack_data,
                         deterministic, fp16)
        # Cap training length; everything else is inherited unchanged.
        self.max_num_epochs = 5

    def validate(self, do_mirroring: bool = True, use_sliding_window: bool = True, step_size: float = 0.5,
                 save_softmax: bool = True, use_gaussian: bool = True, overwrite: bool = True,
                 validation_folder_name: str = 'validation_raw', debug: bool = False, all_in_gpu: bool = False,
                 segmentation_export_kwargs=None):
        """Disabled: this variant never runs validation."""
        pass

    def predict_preprocessed_data_return_seg_and_softmax(self, data: np.ndarray, do_mirroring: bool = True,
                                                         mirror_axes: Tuple[int] = None, use_sliding_window: bool = True,
                                                         step_size: float = 0.5, use_gaussian: bool = True,
                                                         pad_border_mode: str = 'constant', pad_kwargs: dict = None,
                                                         all_in_gpu: bool = True,
                                                         verbose: bool = True) -> Tuple[np.ndarray, np.ndarray]:
        """Disabled: this variant never runs inference."""
        pass

    def save_checkpoint(self, fname, save_optimizer=True):
        """Disabled: this variant never writes checkpoints."""
        pass
class nnUNetTrainerV2_5epochs_CEnoDS(nnUNetTrainerV2_noDeepSupervision):
    """Five-epoch trainer without deep supervision, using plain cross-entropy.

    Validation, inference, checkpointing and online evaluation are all
    overridden as no-ops; only the training iteration does real work.
    """

    def __init__(self, plans_file, fold, output_folder=None, dataset_directory=None, batch_dice=True, stage=None,
                 unpack_data=True, deterministic=True, fp16=False):
        super().__init__(plans_file, fold, output_folder, dataset_directory, batch_dice, stage, unpack_data,
                         deterministic, fp16)
        self.max_num_epochs = 5
        # Swap the inherited segmentation loss for plain cross-entropy.
        self.loss = nn.CrossEntropyLoss()

    def validate(self, do_mirroring: bool = True, use_sliding_window: bool = True, step_size: float = 0.5,
                 save_softmax: bool = True, use_gaussian: bool = True, overwrite: bool = True,
                 validation_folder_name: str = 'validation_raw', debug: bool = False, all_in_gpu: bool = False,
                 segmentation_export_kwargs=None):
        """Disabled for this variant."""
        pass

    def predict_preprocessed_data_return_seg_and_softmax(self, data: np.ndarray, do_mirroring: bool = True,
                                                         mirror_axes: Tuple[int] = None, use_sliding_window: bool = True,
                                                         step_size: float = 0.5, use_gaussian: bool = True,
                                                         pad_border_mode: str = 'constant', pad_kwargs: dict = None,
                                                         all_in_gpu: bool = True,
                                                         verbose: bool = True) -> Tuple[np.ndarray, np.ndarray]:
        """Disabled for this variant."""
        pass

    def save_checkpoint(self, fname, save_optimizer=True):
        """Disabled for this variant."""
        pass

    def run_iteration(self, data_generator, do_backprop=True, run_online_evaluation=False):
        """Run one forward/backward pass and return the loss as a numpy scalar."""
        batch = next(data_generator)
        inputs = maybe_to_torch(batch['data'])
        labels = maybe_to_torch(batch['target'])

        if torch.cuda.is_available():
            inputs = to_cuda(inputs)
            labels = to_cuda(labels)

        self.optimizer.zero_grad()
        predictions = self.network(inputs)
        del inputs

        # CrossEntropyLoss expects integer class indices without a channel axis.
        labels = labels.long()[:, 0]
        loss = self.loss(predictions, labels)

        if run_online_evaluation:
            self.run_online_evaluation(predictions, labels)
        del labels

        if do_backprop:
            use_amp = self.fp16 and amp is not None and torch.cuda.is_available()
            if use_amp:
                with amp.scale_loss(loss, self.optimizer) as scaled:
                    scaled.backward()
            else:
                loss.backward()
            _ = clip_grad_norm_(self.network.parameters(), 12)
            self.optimizer.step()

        return loss.detach().cpu().numpy()

    def run_online_evaluation(self, output, target):
        """Disabled for this variant."""
        pass

    def finish_online_evaluation(self):
        """Disabled for this variant."""
        pass
class nnUNetTrainerV2_5epochs_noDS(nnUNetTrainerV2_noDeepSupervision):
    """Five-epoch trainer without deep supervision, keeping the inherited loss.

    Validation, inference, checkpointing and online evaluation are all
    overridden as no-ops; only the training iteration does real work.
    """

    def __init__(self, plans_file, fold, output_folder=None, dataset_directory=None, batch_dice=True, stage=None,
                 unpack_data=True, deterministic=True, fp16=False):
        super().__init__(plans_file, fold, output_folder, dataset_directory, batch_dice, stage, unpack_data,
                         deterministic, fp16)
        self.max_num_epochs = 5

    def validate(self, do_mirroring: bool = True, use_sliding_window: bool = True, step_size: float = 0.5,
                 save_softmax: bool = True, use_gaussian: bool = True, overwrite: bool = True,
                 validation_folder_name: str = 'validation_raw', debug: bool = False, all_in_gpu: bool = False,
                 segmentation_export_kwargs=None):
        """Disabled for this variant."""
        pass

    def predict_preprocessed_data_return_seg_and_softmax(self, data: np.ndarray, do_mirroring: bool = True,
                                                         mirror_axes: Tuple[int] = None, use_sliding_window: bool = True,
                                                         step_size: float = 0.5, use_gaussian: bool = True,
                                                         pad_border_mode: str = 'constant', pad_kwargs: dict = None,
                                                         all_in_gpu: bool = True,
                                                         verbose: bool = True) -> Tuple[np.ndarray, np.ndarray]:
        """Disabled for this variant."""
        pass

    def save_checkpoint(self, fname, save_optimizer=True):
        """Disabled for this variant."""
        pass

    def run_iteration(self, data_generator, do_backprop=True, run_online_evaluation=False):
        """Run one forward/backward pass and return the loss as a numpy scalar."""
        batch = next(data_generator)
        inputs = maybe_to_torch(batch['data'])
        labels = maybe_to_torch(batch['target'])

        if torch.cuda.is_available():
            inputs = to_cuda(inputs)
            labels = to_cuda(labels)

        self.optimizer.zero_grad()
        predictions = self.network(inputs)
        del inputs

        # Unlike the CE variant, the target keeps its original layout here.
        loss = self.loss(predictions, labels)

        if run_online_evaluation:
            self.run_online_evaluation(predictions, labels)
        del labels

        if do_backprop:
            use_amp = self.fp16 and amp is not None and torch.cuda.is_available()
            if use_amp:
                with amp.scale_loss(loss, self.optimizer) as scaled:
                    scaled.backward()
            else:
                loss.backward()
            _ = clip_grad_norm_(self.network.parameters(), 12)
            self.optimizer.step()

        return loss.detach().cpu().numpy()

    def run_online_evaluation(self, output, target):
        """Disabled for this variant."""
        pass

    def finish_online_evaluation(self):
        """Disabled for this variant."""
        pass
class nnUNetTrainerV2_DDP_5epochs(nnUNetTrainerV2_DDP):
    """Distributed (DDP) trainer capped at 5 epochs.

    Validation, softmax prediction and checkpoint saving are overridden as
    no-ops, so a run exercises the training loop only (used for profiling).
    """

    def __init__(self, plans_file, fold, local_rank, output_folder=None, dataset_directory=None, batch_dice=True,
                 stage=None,
                 unpack_data=True, deterministic=True, distribute_batch_size=False, fp16=False):
        super().__init__(plans_file, fold, local_rank, output_folder, dataset_directory, batch_dice, stage, unpack_data,
                         deterministic, distribute_batch_size, fp16)
        # Cap training length; everything else is inherited unchanged.
        self.max_num_epochs = 5

    def validate(self, do_mirroring: bool = True, use_sliding_window: bool = True, step_size: float = 0.5,
                 save_softmax: bool = True, use_gaussian: bool = True, overwrite: bool = True,
                 validation_folder_name: str = 'validation_raw', debug: bool = False, all_in_gpu: bool = False,
                 segmentation_export_kwargs=None):
        """Disabled: this variant never runs validation."""
        pass

    def predict_preprocessed_data_return_seg_and_softmax(self, data: np.ndarray, do_mirroring: bool = True,
                                                         mirror_axes: Tuple[int] = None, use_sliding_window: bool = True,
                                                         step_size: float = 0.5, use_gaussian: bool = True,
                                                         pad_border_mode: str = 'constant', pad_kwargs: dict = None,
                                                         all_in_gpu: bool = True,
                                                         verbose: bool = True) -> Tuple[np.ndarray, np.ndarray]:
        """Disabled: this variant never runs inference."""
        pass

    def save_checkpoint(self, fname, save_optimizer=True):
        """Disabled: this variant never writes checkpoints."""
        pass
| 48.745763
| 121
| 0.595967
| 1,291
| 11,504
| 5.030984
| 0.152595
| 0.061586
| 0.020015
| 0.029253
| 0.842186
| 0.837105
| 0.820631
| 0.811085
| 0.811085
| 0.811085
| 0
| 0.01069
| 0.325104
| 11,504
| 236
| 122
| 48.745763
| 0.825863
| 0.058675
| 0
| 0.845714
| 0
| 0
| 0.012024
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148571
| false
| 0.108571
| 0.062857
| 0
| 0.251429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
436ce52a3e1fb4a0398ef2205da3e6a7372417ae
| 131
|
py
|
Python
|
examples/python/fastapi/lib/car/car.py
|
ScriptBox99/pyroscope
|
fbf5bd297caf6a987f9fb6ffd0240ed804eaf9b4
|
[
"Apache-2.0"
] | 5,751
|
2021-01-01T18:58:15.000Z
|
2022-03-31T19:19:39.000Z
|
examples/python/fastapi/lib/car/car.py
|
ScriptBox99/pyroscope
|
fbf5bd297caf6a987f9fb6ffd0240ed804eaf9b4
|
[
"Apache-2.0"
] | 913
|
2021-01-05T07:46:12.000Z
|
2022-03-31T20:04:39.000Z
|
examples/python/fastapi/lib/car/car.py
|
admariner/pyroscope
|
e13afb40348914ae29b813881bfad0ca3b89f250
|
[
"Apache-2.0"
] | 329
|
2021-01-11T06:25:55.000Z
|
2022-03-29T08:19:33.000Z
|
from lib.utility.utility import find_nearest_vehicle
def order_car(search_radius):
    """Dispatch the nearest vehicle of kind "car" within *search_radius*."""
    vehicle_kind = "car"
    find_nearest_vehicle(search_radius, vehicle_kind)
| 26.2
| 52
| 0.824427
| 19
| 131
| 5.315789
| 0.631579
| 0.217822
| 0.356436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099237
| 131
| 4
| 53
| 32.75
| 0.855932
| 0
| 0
| 0
| 0
| 0
| 0.022901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43a18920d1b55d41b6d80fe13840edbae942ac15
| 82,219
|
py
|
Python
|
hf_datasets/generated_definitions.py
|
madlag/CodeXGLUE
|
43a3ddd4e7a4d5190c118b4948c47b333d31c3a2
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
hf_datasets/generated_definitions.py
|
madlag/CodeXGLUE
|
43a3ddd4e7a4d5190c118b4948c47b333d31c3a2
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
hf_datasets/generated_definitions.py
|
madlag/CodeXGLUE
|
43a3ddd4e7a4d5190c118b4948c47b333d31c3a2
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
DEFINITIONS={
"cc_clone_detection_big_clone_bench": {
"class_name": "CodeXGlueCCCloneDetectionBigCloneBench",
"data_dir_name": "dataset",
"dataset_type": "Code-Code",
"description": "CodeXGLUE Clone-detection-BigCloneBench dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/Clone-detection-BigCloneBench",
"dir_name": "Clone-detection-BigCloneBench",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Clone-detection-BigCloneBench/dataset/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Clone-detection-BigCloneBench/dataset/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Clone-detection-BigCloneBench/dataset/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Clone-detection-BigCloneBench/dataset/data.jsonl"
],
"name": "cc_clone_detection_big_clone_bench",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/Clone-detection-BigCloneBench",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/Clone-detection-BigCloneBench/dataset"
},
"cc_clone_detection_poj_104": {
"class_name": "CodeXGlueCCCloneDetectionPOJ104",
"data_dir_name": "dataset",
"dataset_type": "Code-Code",
"description": "CodeXGLUE Clone-detection-POJ-104 dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/Clone-detection-POJ-104",
"dir_name": "Clone-detection-POJ-104",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Clone-detection-POJ-104/dataset/programs.tar.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Clone-detection-POJ-104/dataset/preprocess.py"
],
"name": "cc_clone_detection_poj_104",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/Clone-detection-POJ-104",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/Clone-detection-POJ-104/dataset"
},
"cc_cloze_testing_all_go": {
"class_name": "CodeXGlueCCClozeTestingAll",
"data_dir_name": "data/cloze-all/go",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-all dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"dir_name": "ClozeTesting-all",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-all/data/cloze-all/go/clozeTest.json"
],
"name": "cc_cloze_testing_all",
"parameters": {
"language": "go"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-all/data/cloze-all/go"
},
"cc_cloze_testing_all_java": {
"class_name": "CodeXGlueCCClozeTestingAll",
"data_dir_name": "data/cloze-all/java",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-all dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"dir_name": "ClozeTesting-all",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-all/data/cloze-all/java/clozeTest.json"
],
"name": "cc_cloze_testing_all",
"parameters": {
"language": "java"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-all/data/cloze-all/java"
},
"cc_cloze_testing_all_javascript": {
"class_name": "CodeXGlueCCClozeTestingAll",
"data_dir_name": "data/cloze-all/javascript",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-all dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"dir_name": "ClozeTesting-all",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-all/data/cloze-all/javascript/clozeTest.json"
],
"name": "cc_cloze_testing_all",
"parameters": {
"language": "javascript"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-all/data/cloze-all/javascript"
},
"cc_cloze_testing_all_php": {
"class_name": "CodeXGlueCCClozeTestingAll",
"data_dir_name": "data/cloze-all/php",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-all dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"dir_name": "ClozeTesting-all",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-all/data/cloze-all/php/clozeTest.json"
],
"name": "cc_cloze_testing_all",
"parameters": {
"language": "php"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-all/data/cloze-all/php"
},
"cc_cloze_testing_all_python": {
"class_name": "CodeXGlueCCClozeTestingAll",
"data_dir_name": "data/cloze-all/python",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-all dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"dir_name": "ClozeTesting-all",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-all/data/cloze-all/python/clozeTest.json"
],
"name": "cc_cloze_testing_all",
"parameters": {
"language": "python"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-all/data/cloze-all/python"
},
"cc_cloze_testing_all_ruby": {
"class_name": "CodeXGlueCCClozeTestingAll",
"data_dir_name": "data/cloze-all/ruby",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-all dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"dir_name": "ClozeTesting-all",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-all/data/cloze-all/ruby/clozeTest.json"
],
"name": "cc_cloze_testing_all",
"parameters": {
"language": "ruby"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-all",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-all/data/cloze-all/ruby"
},
"cc_cloze_testing_maxmin_go": {
"class_name": "CodeXGlueCCClozeTestingMaxmin",
"data_dir_name": "data/cloze-maxmin/go",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-maxmin dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"dir_name": "ClozeTesting-maxmin",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/go/clozeTest.json"
],
"name": "cc_cloze_testing_maxmin",
"parameters": {
"language": "go"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/go"
},
"cc_cloze_testing_maxmin_java": {
"class_name": "CodeXGlueCCClozeTestingMaxmin",
"data_dir_name": "data/cloze-maxmin/java",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-maxmin dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"dir_name": "ClozeTesting-maxmin",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/java/clozeTest.json"
],
"name": "cc_cloze_testing_maxmin",
"parameters": {
"language": "java"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/java"
},
"cc_cloze_testing_maxmin_javascript": {
"class_name": "CodeXGlueCCClozeTestingMaxmin",
"data_dir_name": "data/cloze-maxmin/javascript",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-maxmin dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"dir_name": "ClozeTesting-maxmin",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/javascript/clozeTest.json"
],
"name": "cc_cloze_testing_maxmin",
"parameters": {
"language": "javascript"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/javascript"
},
"cc_cloze_testing_maxmin_php": {
"class_name": "CodeXGlueCCClozeTestingMaxmin",
"data_dir_name": "data/cloze-maxmin/php",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-maxmin dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"dir_name": "ClozeTesting-maxmin",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/php/clozeTest.json"
],
"name": "cc_cloze_testing_maxmin",
"parameters": {
"language": "php"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/php"
},
"cc_cloze_testing_maxmin_python": {
"class_name": "CodeXGlueCCClozeTestingMaxmin",
"data_dir_name": "data/cloze-maxmin/python",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-maxmin dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"dir_name": "ClozeTesting-maxmin",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/python/clozeTest.json"
],
"name": "cc_cloze_testing_maxmin",
"parameters": {
"language": "python"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/python"
},
"cc_cloze_testing_maxmin_ruby": {
"class_name": "CodeXGlueCCClozeTestingMaxmin",
"data_dir_name": "data/cloze-maxmin/ruby",
"dataset_type": "Code-Code",
"description": "CodeXGLUE ClozeTesting-maxmin dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"dir_name": "ClozeTesting-maxmin",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/ruby/clozeTest.json"
],
"name": "cc_cloze_testing_maxmin",
"parameters": {
"language": "ruby"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/ClozeTesting-maxmin",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/ClozeTesting-maxmin/data/cloze-maxmin/ruby"
},
"cc_code_completion_line_java": {
"class_name": "CodeXGlueCCCodeCompletionLine",
"data_dir_name": "dataset/javaCorpus/line_completion",
"dataset_type": "Code-Code",
"description": "CodeXGLUE CodeCompletion-line dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/CodeCompletion-line",
"dir_name": "CodeCompletion-line",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/CodeCompletion-line/dataset/javaCorpus/line_completion/test.json"
],
"name": "cc_code_completion_line",
"parameters": {
"language": "java",
"original_language_name": "javaCorpus"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/CodeCompletion-line",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/CodeCompletion-line/dataset/javaCorpus/line_completion"
},
"cc_code_completion_line_python": {
"class_name": "CodeXGlueCCCodeCompletionLine",
"data_dir_name": "dataset/py150/line_completion",
"dataset_type": "Code-Code",
"description": "CodeXGLUE CodeCompletion-line dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/CodeCompletion-line",
"dir_name": "CodeCompletion-line",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/CodeCompletion-line/dataset/py150/line_completion/test.json"
],
"name": "cc_code_completion_line",
"parameters": {
"language": "python",
"original_language_name": "py150"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/CodeCompletion-line",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/CodeCompletion-line/dataset/py150/line_completion"
},
"cc_code_completion_token_java": {
"class_name": "CodeXGlueCCCodeCompletionTokenJava",
"data_dir_name": "dataset/javaCorpus",
"dataset_type": "Code-Code",
"description": "CodeXGLUE CodeCompletion-token dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/CodeCompletion-token",
"dir_name": "CodeCompletion-token",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/CodeCompletion-token/dataset/javaCorpus/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/CodeCompletion-token/dataset/javaCorpus/download.sh"
],
"name": "cc_code_completion_token",
"parameters": {
"language": "java",
"original_language_name": "javaCorpus"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/CodeCompletion-token",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/CodeCompletion-token/dataset/javaCorpus"
},
"cc_code_completion_token_python": {
"class_name": "CodeXGlueCCCodeCompletionTokenPython",
"data_dir_name": "dataset/py150",
"dataset_type": "Code-Code",
"description": "CodeXGLUE CodeCompletion-token dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/CodeCompletion-token",
"dir_name": "CodeCompletion-token",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/CodeCompletion-token/dataset/py150/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/CodeCompletion-token/dataset/py150/download_and_extract.sh"
],
"name": "cc_code_completion_token",
"parameters": {
"language": "python",
"original_language_name": "py150"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/CodeCompletion-token",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/CodeCompletion-token/dataset/py150"
},
"cc_code_refinement_medium": {
"class_name": "CodeXGlueCCCodeRefinement",
"data_dir_name": "data",
"dataset_type": "Code-Code",
"description": "CodeXGLUE code-refinement dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/code-refinement",
"dir_name": "code-refinement",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/train.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/test.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/train.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/test.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/valid.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/valid.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/train.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/test.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/train.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/test.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/valid.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/valid.buggy-fixed.fixed"
],
"name": "cc_code_refinement",
"parameters": {
"size": "medium"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/code-refinement",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/code-refinement/data"
},
"cc_code_refinement_small": {
"class_name": "CodeXGlueCCCodeRefinement",
"data_dir_name": "data",
"dataset_type": "Code-Code",
"description": "CodeXGLUE code-refinement dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/code-refinement",
"dir_name": "code-refinement",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/train.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/test.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/train.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/test.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/valid.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/small/valid.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/train.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/test.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/train.buggy-fixed.fixed",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/test.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/valid.buggy-fixed.buggy",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-refinement/data/medium/valid.buggy-fixed.fixed"
],
"name": "cc_code_refinement",
"parameters": {
"size": "small"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/code-refinement",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/code-refinement/data"
},
"cc_code_to_code_trans": {
"class_name": "CodeXGlueCCCodeToCodeTrans",
"data_dir_name": "data",
"dataset_type": "Code-Code",
"description": "CodeXGLUE code-to-code-trans dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/code-to-code-trans",
"dir_name": "code-to-code-trans",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-to-code-trans/data/train.java-cs.txt.java",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-to-code-trans/data/train.java-cs.txt.cs",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-to-code-trans/data/test.java-cs.txt.java",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-to-code-trans/data/valid.java-cs.txt.java",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-to-code-trans/data/test.java-cs.txt.cs",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/code-to-code-trans/data/valid.java-cs.txt.cs"
],
"name": "cc_code_to_code_trans",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/code-to-code-trans",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/code-to-code-trans/data"
},
"cc_defect_detection": {
"class_name": "CodeXGlueCCDefectDetection",
"data_dir_name": "dataset",
"dataset_type": "Code-Code",
"description": "CodeXGLUE Defect-detection dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/Defect-detection",
"dir_name": "Defect-detection",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Defect-detection/dataset/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Defect-detection/dataset/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Defect-detection/dataset/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Defect-detection/dataset/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Code/Defect-detection/dataset/function.json"
],
"name": "cc_defect_detection",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/Defect-detection",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/Defect-detection/dataset"
},
"ct_code_to_text_go": {
"class_name": "CodeXGlueCTCodeToText",
"data_dir_name": ".",
"dataset_type": "Code-Text",
"description": "CodeXGLUE code-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Text/code-to-text",
"dir_name": "code-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/reference.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/predictions.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_dedupe_definitions_v2.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_licenses.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/test/python_test_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/valid/python_valid_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_7.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_10.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_1.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_9.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_4.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_8.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_3.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_13.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_6.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_12.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_11.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_2.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_5.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/bleu.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/run.py"
],
"name": "ct_code_to_text",
"parameters": {
"language": "go"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Text/code-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Text/code-to-text"
},
"ct_code_to_text_java": {
"class_name": "CodeXGlueCTCodeToText",
"data_dir_name": ".",
"dataset_type": "Code-Text",
"description": "CodeXGLUE code-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Text/code-to-text",
"dir_name": "code-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/reference.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/predictions.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_dedupe_definitions_v2.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_licenses.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/test/python_test_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/valid/python_valid_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_7.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_10.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_1.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_9.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_4.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_8.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_3.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_13.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_6.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_12.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_11.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_2.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_5.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/bleu.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/run.py"
],
"name": "ct_code_to_text",
"parameters": {
"language": "java"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Text/code-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Text/code-to-text"
},
"ct_code_to_text_javascript": {
"class_name": "CodeXGlueCTCodeToText",
"data_dir_name": ".",
"dataset_type": "Code-Text",
"description": "CodeXGLUE code-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Text/code-to-text",
"dir_name": "code-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/reference.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/predictions.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_dedupe_definitions_v2.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_licenses.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/test/python_test_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/valid/python_valid_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_7.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_10.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_1.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_9.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_4.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_8.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_3.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_13.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_6.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_12.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_11.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_2.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_5.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/bleu.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/run.py"
],
"name": "ct_code_to_text",
"parameters": {
"language": "javascript"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Text/code-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Text/code-to-text"
},
"ct_code_to_text_php": {
"class_name": "CodeXGlueCTCodeToText",
"data_dir_name": ".",
"dataset_type": "Code-Text",
"description": "CodeXGLUE code-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Text/code-to-text",
"dir_name": "code-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/reference.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/predictions.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_dedupe_definitions_v2.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_licenses.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/test/python_test_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/valid/python_valid_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_7.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_10.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_1.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_9.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_4.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_8.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_3.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_13.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_6.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_12.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_11.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_2.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_5.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/bleu.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/run.py"
],
"name": "ct_code_to_text",
"parameters": {
"language": "php"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Text/code-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Text/code-to-text"
},
"ct_code_to_text_python": {
"class_name": "CodeXGlueCTCodeToText",
"data_dir_name": ".",
"dataset_type": "Code-Text",
"description": "CodeXGLUE code-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Text/code-to-text",
"dir_name": "code-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/reference.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/predictions.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_dedupe_definitions_v2.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_licenses.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/test/python_test_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/valid/python_valid_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_7.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_10.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_1.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_9.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_4.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_8.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_3.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_13.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_6.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_12.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_11.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_2.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_5.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/bleu.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/run.py"
],
"name": "ct_code_to_text",
"parameters": {
"language": "python"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Text/code-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Text/code-to-text"
},
"ct_code_to_text_ruby": {
"class_name": "CodeXGlueCTCodeToText",
"data_dir_name": ".",
"dataset_type": "Code-Text",
"description": "CodeXGLUE code-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Text/code-to-text",
"dir_name": "code-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/reference.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/evaluator/predictions.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_dedupe_definitions_v2.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python_licenses.pkl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/go/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/test/python_test_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/valid/python_valid_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_7.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_10.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_1.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_9.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_4.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_8.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_3.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_13.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_6.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_0.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_12.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_11.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_2.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/python/final/jsonl/train/python_train_5.jsonl.gz",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/java/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/ruby/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/php/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/test.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/dataset_back/javascript/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/bleu.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Code-Text/code-to-text/code/run.py"
],
"name": "ct_code_to_text",
"parameters": {
"language": "ruby"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Text/code-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Text/code-to-text"
},
"tc_nl_code_search_adv": {
"class_name": "CodeXGlueTCNLCodeSearchAdv",
"data_dir_name": ".",
"dataset_type": "Text-Code",
"description": "CodeXGLUE NL-code-search-Adv dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Code/NL-code-search-Adv",
"dir_name": "NL-code-search-Adv",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/README.md",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/dataset.zip",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/evaluator/test.jsonl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/evaluator/evaluator.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/evaluator/predictions.jsonl",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/code/model.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-Adv/code/run.py"
],
"name": "tc_nl_code_search_adv",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Code/NL-code-search-Adv",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Code/NL-code-search-Adv"
},
"tc_nl_code_search_web_query": {
"class_name": "CodeXGlueTCNLCodeSearchWebQuery",
"data_dir_name": "data",
"dataset_type": "Text-Code",
"description": "CodeXGLUE NL-code-search-WebQuery dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Code/NL-code-search-WebQuery",
"dir_name": "NL-code-search-WebQuery",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-WebQuery/data/valid.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-WebQuery/data/preprocess.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-WebQuery/data/train.txt",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/NL-code-search-WebQuery/data/test_webquery.json"
],
"name": "tc_nl_code_search_web_query",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Code/NL-code-search-WebQuery",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Code/NL-code-search-WebQuery/data"
},
"tc_text_to_code": {
"class_name": "CodeXGlueTCTextToCode",
"data_dir_name": "dataset",
"dataset_type": "Text-Code",
"description": "CodeXGLUE text-to-code dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Code/text-to-code",
"dir_name": "text-to-code",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/text-to-code/dataset/concode/train.json",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/text-to-code/dataset/concode/test.json",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Code/text-to-code/dataset/concode/dev.json"
],
"name": "tc_text_to_code",
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Code/text-to-code",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Code/text-to-code/dataset"
},
"tt_text_to_text_da_en": {
"class_name": "CodeXGlueTTTextToText",
"data_dir_name": "data",
"dataset_type": "Text-Text",
"description": "CodeXGLUE text-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Text/text-to-text",
"dir_name": "text-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/preprocessing.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.en"
],
"name": "tt_text_to_text",
"parameters": {
"natural_language_pair": "da-en"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Text/text-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Text/text-to-text/data"
},
"tt_text_to_text_lv_en": {
"class_name": "CodeXGlueTTTextToText",
"data_dir_name": "data",
"dataset_type": "Text-Text",
"description": "CodeXGLUE text-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Text/text-to-text",
"dir_name": "text-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/preprocessing.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.en"
],
"name": "tt_text_to_text",
"parameters": {
"natural_language_pair": "lv-en"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Text/text-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Text/text-to-text/data"
},
"tt_text_to_text_no_en": {
"class_name": "CodeXGlueTTTextToText",
"data_dir_name": "data",
"dataset_type": "Text-Text",
"description": "CodeXGLUE text-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Text/text-to-text",
"dir_name": "text-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/preprocessing.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.en"
],
"name": "tt_text_to_text",
"parameters": {
"natural_language_pair": "no-en"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Text/text-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Text/text-to-text/data"
},
"tt_text_to_text_zh_en": {
"class_name": "CodeXGlueTTTextToText",
"data_dir_name": "data",
"dataset_type": "Text-Text",
"description": "CodeXGLUE text-to-text dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Text-Text/text-to-text",
"dir_name": "text-to-text",
"files": [
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/preprocessing.py",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/da-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/zh-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/lv-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/test/no-en.test.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/lv-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/zh-en.dev.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/da-en.dev.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/dev/no-en.dev.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.da",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.lv",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.no",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.zh",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/no-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/zh-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/lv-en.train.en",
"/home/lagunas/devel/external/datasets-sprint/CodeXGLUE/Text-Text/text-to-text/data/train/da-en.train.en"
],
"name": "tt_text_to_text",
"parameters": {
"natural_language_pair": "zh-en"
},
"project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Text-Text/text-to-text",
"raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Text-Text/text-to-text/data"
}
}
| 85.734098
| 176
| 0.709508
| 10,933
| 82,219
| 5.247508
| 0.013628
| 0.086089
| 0.12522
| 0.18783
| 0.978735
| 0.973645
| 0.969967
| 0.966481
| 0.960694
| 0.937059
| 0
| 0.002564
| 0.131879
| 82,219
| 959
| 177
| 85.734098
| 0.801222
| 0
| 0
| 0.759124
| 0
| 0.577685
| 0.82497
| 0.641474
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.468196
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
43bb52641a07f380ec1baccd56e38b701c68ddf5
| 414
|
py
|
Python
|
kapitan/inputs/helm/helm_binding.py
|
haggishunk/kapitan
|
2de00ff2539562d48863a0db23d1bf0d9b23338e
|
[
"Apache-2.0"
] | null | null | null |
kapitan/inputs/helm/helm_binding.py
|
haggishunk/kapitan
|
2de00ff2539562d48863a0db23d1bf0d9b23338e
|
[
"Apache-2.0"
] | null | null | null |
kapitan/inputs/helm/helm_binding.py
|
haggishunk/kapitan
|
2de00ff2539562d48863a0db23d1bf0d9b23338e
|
[
"Apache-2.0"
] | null | null | null |
# auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI(
"helm_binding",
_version=0x2601,
_types=b"\x00\x00\x01\x0D\x00\x00\x0B\x03\x00\x00\x01\x11\x00\x00\x01\x11\x00\x00\x01\x11\x00\x00\x01\x11\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x00\x0C\x0D\x00\x00\x0C\x03\x00\x00\x00\x0F\x00\x00\x02\x01\x00\x00\x00\x01",
_globals=(b"\x00\x00\x08\x23free", 0, b"\x00\x00\x00\x23renderChart", 0),
)
| 41.4
| 223
| 0.71256
| 81
| 414
| 3.54321
| 0.333333
| 0.397213
| 0.219512
| 0.209059
| 0.334495
| 0.334495
| 0.229965
| 0.229965
| 0.229965
| 0.229965
| 0
| 0.314961
| 0.07971
| 414
| 9
| 224
| 46
| 0.43832
| 0.045894
| 0
| 0
| 1
| 0.142857
| 0.679389
| 0.597964
| 0
| 0
| 0.015267
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
606ca5e655a092dfbb09fcd296b088311f2b971c
| 132
|
py
|
Python
|
sql/snsql/__init__.py
|
shlomihod/smartnoise-sdk
|
1131fed432027c15caa5182d6da00286514efd00
|
[
"MIT"
] | 56
|
2021-02-21T19:45:47.000Z
|
2022-03-20T16:45:56.000Z
|
sql/snsql/__init__.py
|
shlomihod/smartnoise-sdk
|
1131fed432027c15caa5182d6da00286514efd00
|
[
"MIT"
] | 87
|
2021-02-20T20:43:49.000Z
|
2022-03-31T16:24:46.000Z
|
sql/snsql/__init__.py
|
shlomihod/smartnoise-sdk
|
1131fed432027c15caa5182d6da00286514efd00
|
[
"MIT"
] | 17
|
2021-02-18T18:47:09.000Z
|
2022-03-01T06:44:17.000Z
|
from .connect import from_connection, from_df
from .sql.privacy import Privacy
__all__ = ['from_connection', 'from_df', 'Privacy']
| 26.4
| 51
| 0.772727
| 18
| 132
| 5.222222
| 0.444444
| 0.297872
| 0.382979
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 132
| 5
| 51
| 26.4
| 0.803419
| 0
| 0
| 0
| 0
| 0
| 0.218045
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
60ac484877850d2884dbad86c4f0af289ff85fd6
| 12
|
py
|
Python
|
docker/python/app/codes/questions/course1/lesson1/study5/7/sample.py
|
morisakigogogo/python-react-demo
|
656299a200479bc5b508c6d15ad8d4cfbb0c7fe0
|
[
"MIT"
] | null | null | null |
docker/python/app/codes/questions/course1/lesson1/study5/7/sample.py
|
morisakigogogo/python-react-demo
|
656299a200479bc5b508c6d15ad8d4cfbb0c7fe0
|
[
"MIT"
] | null | null | null |
docker/python/app/codes/questions/course1/lesson1/study5/7/sample.py
|
morisakigogogo/python-react-demo
|
656299a200479bc5b508c6d15ad8d4cfbb0c7fe0
|
[
"MIT"
] | null | null | null |
print(10/2)
| 6
| 11
| 0.666667
| 3
| 12
| 2.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.083333
| 12
| 1
| 12
| 12
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
60ca2a8845407f1ce61c42a58147757bb96b7e00
| 3,600
|
py
|
Python
|
test/fixture/ast.py
|
gcn-ying/arcee
|
dc6534e1e9bfaff2fbfec3a03026b1dc0439ac2b
|
[
"MIT"
] | 2
|
2019-11-15T03:59:18.000Z
|
2019-11-15T04:03:12.000Z
|
test/fixture/ast.py
|
gcn-ying/arcee
|
dc6534e1e9bfaff2fbfec3a03026b1dc0439ac2b
|
[
"MIT"
] | null | null | null |
test/fixture/ast.py
|
gcn-ying/arcee
|
dc6534e1e9bfaff2fbfec3a03026b1dc0439ac2b
|
[
"MIT"
] | null | null | null |
from arcee.lexer.lexer import Token
from arcee.parser.arcee_parser import Nonterminal
parser_syntax = '''program : expression ;
expression : constexp
| diffexp
| zeroexp
| ifexp
| varexp
| letexp
;
constexp : $NUMBER ;
diffexp : '-' '(' expression ',' expression ')' ;
zeroexp : 'zero?' '(' expression ')' ;
ifexp : 'if' expression 'then' expression 'else' expression ;
varexp : $ID ;
letexp : 'let' $ID '=' expression 'in' expression ;'''
ast = [
Nonterminal(head=Token(type=3, text='program'), content=[[Token(type=3, text='expression')]]),
Nonterminal(head=Token(type=3, text='expression'),
content=[[Token(type=3, text='constexp')], [Token(type=3, text='diffexp')],
[Token(type=3, text='zeroexp')], [Token(type=3, text='ifexp')],
[Token(type=3, text='varexp')], [Token(type=3, text='letexp')]]),
Nonterminal(head=Token(type=3, text='constexp'),
content=[[Token(type=5, text='$NUMBER')]]),
Nonterminal(head=Token(type=3, text='diffexp'),
content=[[Token(type=4, text='-'), Token(type=4, text='('), Token(type=3, text='expression'),
Token(type=4, text=','), Token(type=3, text='expression'), Token(type=4, text=')')]]),
Nonterminal(head=Token(type=3, text='zeroexp'),
content=[[Token(type=4, text='zero?'), Token(type=4, text='('), Token(type=3, text='expression'),
Token(type=4, text=')')]]),
Nonterminal(head=Token(type=3, text='ifexp'),
content=[[Token(type=4, text='if'), Token(type=3, text='expression'), Token(type=4, text='then'),
Token(type=3, text='expression'), Token(type=4, text='else'),
Token(type=3, text='expression')]]),
Nonterminal(head=Token(type=3, text='varexp'),
content=[[Token(type=5, text='$ID')]]),
Nonterminal(head=Token(type=3, text='letexp'),
content=[[Token(type=4, text='let'), Token(type=5, text='$ID'), Token(type=4, text='='),
Token(type=3, text='expression'), Token(type=4, text='in'),
Token(type=3, text='expression')]])
]
keywords = ['$ID', '$NUMBER']
tokens_name = ['program', 'expression', 'constexp', 'diffexp', 'zeroexp', 'ifexp', 'varexp', 'letexp']
lexer_re = {'$ID': r'[a-zA-Z]+', '$NUMBER': r'\d+(\.\d*)'}
nonterminals_where_content_has_many_tokens = (
Nonterminal(head=Token(type=3, text='expression'),
content=[[Token(type=3, text='constexp')], [Token(type=3, text='diffexp')],
[Token(type=3, text='zeroexp')], [Token(type=3, text='ifexp')],
[Token(type=3, text='varexp')], [Token(type=3, text='letexp')]]),
)
a = (
[[Token(type=3, text='expression')]],
[[Token(type=5, text='$NUMBER')]],
[[Token(type=4, text='-'), Token(type=4, text='('), Token(type=3, text='expression'), Token(type=4, text=','), Token(type=3, text='expression'), Token(type=4, text=')')]],
[[Token(type=4, text='zero?'), Token(type=4, text='('), Token(type=3, text='expression'), Token(type=4, text=')')]],
[[Token(type=4, text='if'), Token(type=3, text='expression'), Token(type=4, text='then'), Token(type=3, text='expression'), Token(type=4, text='else'), Token(type=3, text='expression')]],
[[Token(type=5, text='$ID')]],
[[Token(type=4, text='let'), Token(type=5, text='$ID'), Token(type=4, text='='), Token(type=3, text='expression'), Token(type=4, text='in'), Token(type=3, text='expression')]]
)
| 55.384615
| 191
| 0.563333
| 447
| 3,600
| 4.516779
| 0.100671
| 0.316493
| 0.193165
| 0.270431
| 0.824666
| 0.760277
| 0.702823
| 0.654284
| 0.654284
| 0.627538
| 0
| 0.024661
| 0.200278
| 3,600
| 64
| 192
| 56.25
| 0.676624
| 0
| 0
| 0.135593
| 0
| 0
| 0.237222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.033898
| 0
| 0.033898
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71c883fb97ca1601f3b1a2d607784c51dfb716f2
| 15,929
|
py
|
Python
|
binary_class/load_data.py
|
HarikrishnanNB/genome-classification-nl
|
087d187d2320b6f3a4fedde00c67f569e18cca98
|
[
"Apache-2.0"
] | null | null | null |
binary_class/load_data.py
|
HarikrishnanNB/genome-classification-nl
|
087d187d2320b6f3a4fedde00c67f569e18cca98
|
[
"Apache-2.0"
] | null | null | null |
binary_class/load_data.py
|
HarikrishnanNB/genome-classification-nl
|
087d187d2320b6f3a4fedde00c67f569e18cca98
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Author: Harikrishnan NB
Email: harikrishnannb07@gmail.com
Data details:
Class-0: SARS-CoV-2
Class-1: Coronaviradae
Class-2: Metapneumovirus
Class-3: Rhinovirus
Feature Transformation
"A" = 1.0
"G" = 0.75
"T" = 0.50
"C" = 0.25
"""
from Bio import SeqIO
import os
import numpy as np
import logging
from numpy.fft import fft
# def pre_processing_(DATA_NAME, label, GENOME_LENGTH, SEQUENCE_THRESHOLD_LENGTH):
# """
# Parameters
# ----------
# DATA_NAME : TYPE
# DATA_NAME = ['Sars_cov_2.genomes' , 'Coronaviridae.genomes', 'Metapneumovirus.genomes', 'Rhinovirus.genomes']
# label : TYPE
# DESCRIPTION.
# GENOME_LENGTH : TYPE
# DESCRIPTION.
# SEQUENCE_THRESHOLD_LENGTH : TYPE
# DESCRIPTION.
# Returns
# -------
# fourier_data_normalized : TYPE
# DESCRIPTION.
# labels : TYPE
# DESCRIPTION.
# """
# for lab in label:
# num_instance = 0
# genome_list = []
# DATA_PATH = 'referencedata/'+ DATA_NAME[lab]+'.fasta'
# fasta_sequences = SeqIO.parse(open(DATA_PATH),'fasta')
# # with open(output_file) as out_file:
# for fasta in fasta_sequences:
# name, sequence = fasta.id, str(fasta.seq)
# # new_sequence = some_function(sequence)
# # write_fasta(out_file)
# print(name)
# if len(sequence) >= SEQUENCE_THRESHOLD_LENGTH:
# genome_list.append(sequence)
# num_instance = num_instance + 1
# print("Number of samples = ", num_instance)
# genome_mat = np.zeros((num_instance, GENOME_LENGTH))
# for genome_index in range(0, num_instance):
# string = genome_list[genome_index]
# if len(string) < GENOME_LENGTH:
# for num_features in range(0, len(string)):
# if string[num_features] == 'A' or string[num_features] == 'a':
# genome_mat[genome_index, num_features] = 1
# elif string[num_features] == 'G' or string[num_features] == 'g':
# genome_mat[genome_index, num_features] = 0.75
# elif string[num_features] == 'T' or string[num_features] == 't':
# genome_mat[genome_index, num_features] = 0.50
# elif string[num_features] == 'C' or string[num_features] == 'c':
# genome_mat[genome_index, num_features] = 0.25
# else:
# genome_mat[genome_index, num_features] = 0
# print(string[num_features], ", Genome index = ",genome_index, ", Feature Number = ", num_features )
# else:
# for num_features in range(0, GENOME_LENGTH):
# if string[num_features] == 'A' or string[num_features] == 'a':
# genome_mat[genome_index, num_features] = 1
# elif string[num_features] == 'G' or string[num_features] == 'g':
# genome_mat[genome_index, num_features] = 0.75
# elif string[num_features] == 'T' or string[num_features] == 't':
# genome_mat[genome_index, num_features] = 0.50
# elif string[num_features] == 'C' or string[num_features] == 'c':
# genome_mat[genome_index, num_features] = 0.25
# else:
# genome_mat[genome_index, num_features] = 0
# print(string[num_features], ", Genome index = ",genome_index, ", Feature Number = ", num_features )
# fourier_features = np.zeros((genome_mat.shape[0], genome_mat.shape[1]))
# labels = lab * np.ones((genome_mat.shape[0], 1))
# # Computing the absolute value Fast Fourier transform coefficients of each data instance.
# for data_instance in range(0, genome_mat.shape[0]):
# fourier_features[data_instance, :] = np.abs(fft(genome_mat[data_instance, :]))
# # Normalization done for each row.
# numerator = fourier_features.T - np.min(fourier_features, axis=1)
# denominator = np.max(fourier_features, axis=1) - np.min(fourier_features, axis=1)
# fourier_data_normalized = (numerator/denominator).T
# # Checking whether the data is normalized.
# try:
# assert np.min(fourier_data_normalized) >= 0.0 and np.max(fourier_data_normalized) <= 1.0
# except AssertionError:
# logging.error("Error-Data should be in the range [0, 1]", exc_info=True)
# return fourier_data_normalized, labels
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Author: Harikrishnan NB
Email: harikrishnannb07@gmail.com
Data details:
Class-0: SARS-CoV-2
Class-1: Coronaviradae
Class-2: Metapneumovirus
Class-3: Rhinovirus
Feature Transformation
"A" = 1.0
"G" = 0.75
"T" = 0.50
"C" = 0.25
"""
from Bio import SeqIO
import os
import numpy as np
import logging
from numpy.fft import fft
def pre_processing_(DATA_NAME, label, GENOME_LENGTH, SEQUENCE_THRESHOLD_LENGTH):
"""
Parameters
----------
DATA_NAME : TYPE
DATA_NAME = ['Sars_cov_2.genomes' , 'Coronaviridae.genomes', 'Metapneumovirus.genomes', 'Rhinovirus.genomes', 'Influenza.genomes' ]
label : TYPE
DESCRIPTION.
GENOME_LENGTH : TYPE
DESCRIPTION.
SEQUENCE_THRESHOLD_LENGTH : TYPE
DESCRIPTION.
Returns
-------
fourier_data_normalized : TYPE
DESCRIPTION.
labels : TYPE
DESCRIPTION.
"""
if label[0] != 4:#'Influenza.genomes'
for lab in label:
num_instance = 0
genome_list = []
DATA_PATH = 'referencedata/'+ DATA_NAME[lab]+'.fasta'
fasta_sequences = SeqIO.parse(open(DATA_PATH),'fasta')
# with open(output_file) as out_file:
for fasta in fasta_sequences:
name, sequence = fasta.id, str(fasta.seq)
# new_sequence = some_function(sequence)
# write_fasta(out_file)
print(name)
if len(sequence) >= SEQUENCE_THRESHOLD_LENGTH:
genome_list.append(sequence)
num_instance = num_instance + 1
print("Number of samples = ", num_instance)
genome_mat = np.zeros((num_instance, GENOME_LENGTH))
for genome_index in range(0, num_instance):
string = genome_list[genome_index]
if len(string) < GENOME_LENGTH:
for num_features in range(0, len(string)):
if string[num_features] == 'A' or string[num_features] == 'a':
genome_mat[genome_index, num_features] = 1
elif string[num_features] == 'G' or string[num_features] == 'g':
genome_mat[genome_index, num_features] = 0.75
elif string[num_features] == 'T' or string[num_features] == 't':
genome_mat[genome_index, num_features] = 0.50
elif string[num_features] == 'C' or string[num_features] == 'c':
genome_mat[genome_index, num_features] = 0.25
else:
genome_mat[genome_index, num_features] = 0
print(string[num_features], ", Genome index = ",genome_index, ", Feature Number = ", num_features )
else:
for num_features in range(0, GENOME_LENGTH):
if string[num_features] == 'A' or string[num_features] == 'a':
genome_mat[genome_index, num_features] = 1
elif string[num_features] == 'G' or string[num_features] == 'g':
genome_mat[genome_index, num_features] = 0.75
elif string[num_features] == 'T' or string[num_features] == 't':
genome_mat[genome_index, num_features] = 0.50
elif string[num_features] == 'C' or string[num_features] == 'c':
genome_mat[genome_index, num_features] = 0.25
else:
genome_mat[genome_index, num_features] = 0
print(string[num_features], ", Genome index = ",genome_index, ", Feature Number = ", num_features )
fourier_features = np.zeros((genome_mat.shape[0], genome_mat.shape[1]))
labels = lab * np.ones((genome_mat.shape[0], 1))
# Computing the absolute value Fast Fourier transform coefficients of each data instance.
for data_instance in range(0, genome_mat.shape[0]):
fourier_features[data_instance, :] = np.abs(fft(genome_mat[data_instance, :]))
# Normalization done for each row.
numerator = fourier_features.T - np.min(fourier_features, axis=1)
denominator = np.max(fourier_features, axis=1) - np.min(fourier_features, axis=1)
fourier_data_normalized = (numerator/denominator).T
# Checking whether the data is normalized.
try:
assert np.min(fourier_data_normalized) >= 0.0 and np.max(fourier_data_normalized) <= 1.0
except AssertionError:
logging.error("Error-Data should be in the range [0, 1]", exc_info=True)
return fourier_data_normalized, labels
else:
lab=label[0]
DATA_PATH = 'referencedata/'+ DATA_NAME[lab]+'.fasta'
fasta_sequences = SeqIO.parse(open(DATA_PATH),'fasta')
skip_length=8
name_list = []
sequence_list = []
for fasta in fasta_sequences:
name, sequence = fasta.id, str(fasta.seq)
name_list.append(name)
sequence_list.append(sequence)
# temp_sequence = temp_sequence+sequence
# if np.mod(num, skip_length)==0:
num_instance = np.int(len(name_list)/8)
genome_mat = np.zeros((num_instance, GENOME_LENGTH))
string_list =[]
temp_string = sequence_list[0]
for itera in range(1, len(sequence_list)):
genome_numeric_list =[]
string = sequence_list[itera]
if np.mod(itera, skip_length) != 0:
temp_string = temp_string+string
if itera == 1023:
string_list.append(temp_string)
else:
string_list.append(temp_string)
temp_string = string
for genome_index in range(0, num_instance):
string = string_list[genome_index]
if len(string) < GENOME_LENGTH:
for num_features in range(0, len(string)):
if string[num_features] == 'A' or string[num_features] == 'a':
genome_mat[genome_index, num_features] = 1
elif string[num_features] == 'G' or string[num_features] == 'g':
genome_mat[genome_index, num_features] = 0.75
elif string[num_features] == 'T' or string[num_features] == 't':
genome_mat[genome_index, num_features] = 0.50
elif string[num_features] == 'C' or string[num_features] == 'c':
genome_mat[genome_index, num_features] = 0.25
else:
genome_mat[genome_index, num_features] = 0
print(string[num_features], ", Genome index = ",genome_index, ", Feature Number = ", num_features )
else:
for num_features in range(0, GENOME_LENGTH):
if string[num_features] == 'A' or string[num_features] == 'a':
genome_mat[genome_index, num_features] = 1
elif string[num_features] == 'G' or string[num_features] == 'g':
genome_mat[genome_index, num_features] = 0.75
elif string[num_features] == 'T' or string[num_features] == 't':
genome_mat[genome_index, num_features] = 0.50
elif string[num_features] == 'C' or string[num_features] == 'c':
genome_mat[genome_index, num_features] = 0.25
else:
genome_mat[genome_index, num_features] = 0
print(string[num_features], ", Genome index = ",genome_index, ", Feature Number = ", num_features )
fourier_features = np.zeros((genome_mat.shape[0], genome_mat.shape[1]))
labels = lab * np.ones((genome_mat.shape[0], 1))
# Computing the absolute value Fast Fourier transform coefficients of each data instance.
for data_instance in range(0, genome_mat.shape[0]):
fourier_features[data_instance, :] = np.abs(fft(genome_mat[data_instance, :]))
# Normalization done for each row.
numerator = fourier_features.T - np.min(fourier_features, axis=1)
denominator = np.max(fourier_features, axis=1) - np.min(fourier_features, axis=1)
fourier_data_normalized = (numerator/denominator).T
# Checking whether the data is normalized.
try:
assert np.min(fourier_data_normalized) >= 0.0 and np.max(fourier_data_normalized) <= 1.0
except AssertionError:
logging.error("Error-Data should be in the range [0, 1]", exc_info=True)
return fourier_data_normalized, labels
def binary_data_sars_1_2():
DATA_PATH = "PREPROCESSED_DATA/"
COV_1_DATA = np.load(DATA_PATH + "COV_1_DATA.npy")
COV_1_LABEL = np.load(DATA_PATH + "COV_1_LABEL.npy")
COV_2_DATA = np.load(DATA_PATH + "COV_2_DATA.npy")
COV_2_LABEL = np.load(DATA_PATH + "COV_2_LABEL.npy")
DATA = np.vstack((COV_1_DATA, COV_2_DATA))
LABELS = np.vstack((COV_1_LABEL,COV_2_LABEL))
FOURIER_FEATURES = np.zeros((DATA.shape[0],DATA.shape[1]))
for data_instance in range(0, DATA.shape[0]):
FOURIER_FEATURES[data_instance,:] = np.abs( fft( DATA[data_instance,:] ) )
X_TRAIN_NORM = ((FOURIER_FEATURES.T - np.min(FOURIER_FEATURES, axis = 1))/(np.max(FOURIER_FEATURES, axis= 1) - np.min(FOURIER_FEATURES, axis = 1))).T
try:
assert np.min(X_TRAIN_NORM) >= 0.0 and np.max(X_TRAIN_NORM) <= 1.0
except AssertionError:
logging.error("Train Data is NOT normalized. Hint: Go to get_data() function and normalize the data to lie in the range [0, 1]", exc_info=True)
#
return X_TRAIN_NORM, LABELS
# DATA_NAME = ['Sars_cov_2.genomes' , 'Coronaviridae.genomes', 'Metapneumovirus.genomes', 'Rhinovirus.genomes']
# label = [1]
# GENOME_LENGTH = 8000
# SEQUENCE_THRESHOLD_LENGTH = 6000
# fourier_data_normalized, labels = pre_processing_(DATA_NAME, label, GENOME_LENGTH, SEQUENCE_THRESHOLD_LENGTH)
| 39.723192
| 153
| 0.545734
| 1,776
| 15,929
| 4.656532
| 0.091216
| 0.12769
| 0.111004
| 0.072551
| 0.907497
| 0.894921
| 0.877146
| 0.877146
| 0.87243
| 0.854293
| 0
| 0.020864
| 0.350053
| 15,929
| 400
| 154
| 39.8225
| 0.777939
| 0.357147
| 0
| 0.707483
| 0
| 0.006803
| 0.052342
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 1
| 0.013605
| false
| 0
| 0.068027
| 0
| 0.102041
| 0.040816
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0da7b14a7b51eded2145684fe789dc486b43aea
| 112
|
py
|
Python
|
RecoBTag/PerformanceDB/python/BTagPerformanceDBMC36X.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
RecoBTag/PerformanceDB/python/BTagPerformanceDBMC36X.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
RecoBTag/PerformanceDB/python/BTagPerformanceDBMC36X.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
from RecoBTag.PerformanceDB.measure.Btag_pf36 import *
from RecoBTag.PerformanceDB.measure.Btag_calo36 import *
| 37.333333
| 56
| 0.857143
| 14
| 112
| 6.714286
| 0.571429
| 0.255319
| 0.531915
| 0.680851
| 0.765957
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.071429
| 112
| 2
| 57
| 56
| 0.865385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e0eaa591794989000de1ff09fc74578cace3853c
| 287
|
py
|
Python
|
django-server/CoronaGo/keys.py
|
Junt0/CoronaGo
|
c75e14c8b07be1be8d8d3a323aceb81a55aa969b
|
[
"MIT"
] | null | null | null |
django-server/CoronaGo/keys.py
|
Junt0/CoronaGo
|
c75e14c8b07be1be8d8d3a323aceb81a55aa969b
|
[
"MIT"
] | 6
|
2021-03-19T01:22:24.000Z
|
2021-09-22T18:49:17.000Z
|
django-server/CoronaGo/keys.py
|
Junt0/CoronaGo
|
c75e14c8b07be1be8d8d3a323aceb81a55aa969b
|
[
"MIT"
] | null | null | null |
"""
DO NOT COMMIT THIS FILE WITH INFORMATION FILLED OUT!
DO NOT COMMIT THIS FILE WITH INFORMATION FILLED OUT!
DO NOT COMMIT THIS FILE WITH INFORMATION FILLED OUT!
DO NOT COMMIT THIS FILE WITH INFORMATION FILLED OUT!
"""
DB_USERNAME = ''
DB_PASSWORD = ''
SECRET_KEY = ''
| 26.090909
| 56
| 0.710801
| 42
| 287
| 4.785714
| 0.333333
| 0.099502
| 0.218905
| 0.298507
| 0.855721
| 0.855721
| 0.855721
| 0.855721
| 0.855721
| 0.855721
| 0
| 0
| 0.222997
| 287
| 10
| 57
| 28.7
| 0.901345
| 0.735192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
4608c50c7a2bbaec2bea5d3f072e1e4d5d59b0b2
| 18,630
|
py
|
Python
|
tests/test_ReactionSystems.py
|
dsondak/pychemkin
|
c97f7b4c5a0306fa071709386e151edd36d30b45
|
[
"MIT"
] | 1
|
2021-08-31T12:33:03.000Z
|
2021-08-31T12:33:03.000Z
|
tests/test_ReactionSystems.py
|
hsim13372/pychemkin
|
0d98db487ed6862ec60ccbf2be27aced33ed9283
|
[
"MIT"
] | 8
|
2018-01-22T21:40:53.000Z
|
2018-05-31T15:44:17.000Z
|
tests/test_ReactionSystems.py
|
hsim13372/pychemkin
|
0d98db487ed6862ec60ccbf2be27aced33ed9283
|
[
"MIT"
] | 5
|
2018-01-24T21:12:17.000Z
|
2018-05-31T15:27:53.000Z
|
"""Test module for reaction systems"""
import os, sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import numpy
import os
import pytest
import warnings
#warnings.simplefilter("error")
from pychemkin.config import DB_DIRECTORY
from pychemkin.parsers.XMLParser import XMLParser
from pychemkin.parsers.SQLParser import SQLParser
from pychemkin.reactions.ReactionSystems import ReactionSystem
TEST_DB_PATH = os.path.join(DB_DIRECTORY + "/NASA7_coeffs.sqlite")
@pytest.fixture
def test_lowT_rxn_sys():
"""Returns a valid reaction system at low T"""
xml_filename = "tests/test_xml_files/rxn.xml"
xml_parser = XMLParser(xml_filename)
species = xml_parser.get_species()
sql_parser = SQLParser(TEST_DB_PATH, species)
print(sql_parser)
thermo_coeffs = sql_parser.get_thermo_coeffs()
temp = 500 # "low" temperature range in NASA coeffs database
concentrations = {'H':1, 'O2':1, 'H2O':1}
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
return rxnsys
@pytest.fixture
def test_highT_rxn_sys():
"""Returns a valid reaction system at high T"""
xml_filename = "tests/test_xml_files/rxn.xml"
xml_parser = XMLParser(xml_filename)
species = xml_parser.get_species()
sql_parser = SQLParser(TEST_DB_PATH, species)
thermo_coeffs = sql_parser.get_thermo_coeffs()
temp = 5000 # "high" temperature range in NASA coeffs database
concentrations = {'H':1, 'O2':1, 'H2O':1}
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
return rxnsys
@pytest.fixture
def test_rev_rxn_sys():
"""Returns a valid reaction system with reversible reaction(s)."""
xml_filename = "tests/test_xml_files/rev_rxn.xml"
xml_parser = XMLParser(xml_filename)
species = xml_parser.get_species()
sql_parser = SQLParser(TEST_DB_PATH, species)
thermo_coeffs = sql_parser.get_thermo_coeffs()
temp = 500 # "low" temperature range in NASA coeffs database
concentrations = {'H':1, 'O2':1, 'H2O':1}
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
return rxnsys
# FIRST ERROR: string vs int!!
def test_rxn_system_functionalities(test_lowT_rxn_sys):
"""Test functions in reaction system at low T."""
# Test sort_reaction_rates() routine
rates = test_lowT_rxn_sys.sort_reaction_rates()
assert rates['H'] == -30.
assert rates['O2'] == -15.
assert rates['H2O'] == 30.
# Test fetching of low temperature NASA matrix
expected_lowT_nasa = {'H': numpy.array([2.50000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.25473660E+05,
-0.44668285E+00]),
'H2O': numpy.array([ 0.41986352E+01, -0.20364017E-02,
0.65203416E-05, -0.54879269E-08,
0.17719680E-11, -0.30293726E+05,
-0.84900901E+00]),
'O2': numpy.array([3.78535371E+00, -3.21928540E-03,
1.12323443E-05, -1.17254068E-08,
4.17659585E-12, 1.02922572E+04,
3.27320239E+00])}
assert (numpy.isclose(numpy.longdouble(test_lowT_rxn_sys.NASA_matrix['H']),
expected_lowT_nasa['H'], atol=1e-16)).all()
assert (numpy.isclose(numpy.longdouble(test_lowT_rxn_sys.NASA_matrix['H2O']),
expected_lowT_nasa['H2O'], atol=1e-16)).all()
assert (numpy.isclose(numpy.longdouble(test_lowT_rxn_sys.NASA_matrix['O2']),
expected_lowT_nasa['O2'], atol=1e-16)).all()
# Second Error - string vs int!!
def test_highT_rxn_system_functionaltiies(test_highT_rxn_sys):
"""Test functions in reaction system at high T."""
# Test fetching of high temperature NASA matrix
expected_highT_nasa = ({'O2': numpy.array([3.45852381E+00, 1.04045351E-03,
-2.79664041E-07, 3.11439672E-11,
-8.55656058E-16, 1.02229063E+04,
4.15264119E+00]),
'H2O': numpy.array([0.26770389E+01, 0.29731816E-02,
-0.77376889E-06, 0.94433514E-10,
-0.42689991E-14, -0.29885894E+05,
0.68825500E+01]),
'H': numpy.array([2.50000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.25473660E+05,
-0.44668285E+00])})
assert (numpy.isclose(numpy.longdouble(test_highT_rxn_sys.NASA_matrix['O2']),
expected_highT_nasa['O2'], atol=1e-16)).all()
assert (numpy.isclose(numpy.longdouble(test_highT_rxn_sys.NASA_matrix['H2O']),
expected_highT_nasa['H2O'], atol=1e-16)).all()
assert (numpy.isclose(numpy.longdouble(test_highT_rxn_sys.NASA_matrix['H']),
expected_highT_nasa['H'], atol=1e-16)).all()
def test_rxn_sys_invalid_temperature():
"""Tests setting up reaction system with invalid temperatures."""
xml_filename = "tests/test_xml_files/rxns_mixed.xml"
xml_parser = XMLParser(xml_filename)
species = xml_parser.get_species()
sql_parser = SQLParser(TEST_DB_PATH, species)
thermo_coeffs = sql_parser.get_thermo_coeffs()
concentrations = {'H':1, 'O2':2, 'OH':1, 'O':4, 'H2O':0, 'H2':1}
temp = 0
with pytest.raises(ValueError):
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
temp = -100
with pytest.raises(ValueError):
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
def test_rxn_sys_get_reaction_rate_for_3_rxns():
"""Tests function to get reaction rate for a given system of reactions (more than 1 reaction)."""
xml_filename = "tests/test_xml_files/rxnsys.xml"
xml_parser = XMLParser(xml_filename)
species = xml_parser.get_species()
sql_parser = SQLParser(TEST_DB_PATH, species)
thermo_coeffs = sql_parser.get_thermo_coeffs()
temp = 10
concentrations = {'H':1, 'O2':1, 'OH':1, 'O':1, 'H2O':1, 'H2':1}
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
rates = rxnsys.sort_reaction_rates()
assert rates['H'] == -10.
assert rates['O2'] == -15.
assert rates['H2O'] == 40.
assert rates['H2'] == -20.
assert rates['O'] == -10.
assert rates['OH'] == 0.
def test_rxn_sys_rev_reaction(test_rev_rxn_sys):
"""Tests setting up reaction system with reversible reaction."""
expected_nasa = {'H': numpy.array([2.50000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.25473660E+05,
-0.44668285E+00]),
'H2O': numpy.array([ 0.41986352E+01, -0.20364017E-02,
0.65203416E-05, -0.54879269E-08,
0.17719680E-11, -0.30293726E+05,
-0.84900901E+00]),
'O2': numpy.array([3.78535371E+00, -3.21928540E-03,
1.12323443E-05, -1.17254068E-08,
4.17659585E-12, 1.02922572E+04,
3.27320239E+00])}
rev_rxn_obj = test_rev_rxn_sys.reaction_list[0]
assert (numpy.isclose(numpy.longdouble(rev_rxn_obj.NASA_poly_coefs_dict['H2O']), expected_nasa['H2O'])).all()
assert (numpy.isclose(numpy.longdouble(rev_rxn_obj.NASA_poly_coefs_dict['O2']), expected_nasa['O2'])).all()
assert (numpy.isclose(numpy.longdouble(rev_rxn_obj.NASA_poly_coefs_dict['H']), expected_nasa['H'])).all()
def test_rxn_sys_irrev_reaction_antioch():
"""Test against Antioch irrev rxn results"""
xml_filename = "tests/test_xml_files/rxns_irreversible_antioch.xml"
xml_parser = XMLParser(xml_filename)
species = xml_parser.get_species()
sql_parser = SQLParser(TEST_DB_PATH, species)
thermo_coeffs = sql_parser.get_thermo_coeffs()
# Condition #1
temp = 2500.0000000000000000
concentrations = ({'H': 5.0000000000000000e-01,
'O': 0.0000000000000000e+00,
'OH': 0.0000000000000000e+00,
'H2': 2.0000000000000000e+00,
'H2O': 0.0000000000000000e+00,
'O2': 1.0000000000000000e+00,
'HO2': 0.0000000000000000e+00,
'H2O2': 0.0000000000000000e+00})
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
rates = rxnsys.sort_reaction_rates()
assert numpy.isclose(rates['H'], -3.3300992971255586e+13, atol=1e-16)
assert numpy.isclose(rates['O'], 3.3300992971255586e+13, atol=1e-16)
assert numpy.isclose(rates['OH'], 3.3300992971255586e+13, atol=1e-16)
assert numpy.isclose(rates['H2'], 0.0000000000000000e+00, atol=1e-16)
assert numpy.isclose(rates['H2O'], 0.0000000000000000e+00, atol=1e-16)
assert numpy.isclose(rates['O2'], -3.3300992971255586e+13, atol=1e-16)
assert numpy.isclose(rates['HO2'], 0.0000000000000000e+00, atol=1e-16)
assert numpy.isclose(rates['H2O2'], 0.0000000000000000e+00, atol=1e-16)
# Condition #2
temp = 2500.0000000000000000
concentrations = ({'H': 5.0000000000000000e-01,
'O': 1.0000000000000001e-01,
'OH': 1.0000000000000000e-02,
'H2': 2.0000000000000000e+00,
'H2O': 2.5000000000000000e-01,
'O2': 1.0000000000000000e+00,
'HO2': 2.9999999999999999e-01,
'H2O2': 2.0000000000000000e-02})
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
rates = rxnsys.sort_reaction_rates()
assert numpy.isclose(rates['H'], -3.7324963347340922e+13, atol=1e-16)
assert numpy.isclose(rates['O'], 2.3071533925003262e+13, atol=1e-16)
assert numpy.isclose(rates['OH'], 6.4368180909475500e+13, atol=1e-16)
assert numpy.isclose(rates['H2'], -6.6439941741054521e+12, atol=1e-16)
assert numpy.isclose(rates['H2O'], 4.9820020841399396e+11, atol=1e-16)
assert numpy.isclose(rates['O2'], -2.9843856969218777e+13, atol=1e-16)
assert numpy.isclose(rates['HO2'], -1.3498571473703539e+13, atol=1e-16)
assert numpy.isclose(rates['H2O2'], -6.2652907852405969e+11, atol=1e-16)
# Condition #3
temp = 950.0000000000000000
concentrations = ({'H': 5.0000000000000000e-01,
'O': 0.0000000000000000e+00,
'OH': 0.0000000000000000e+00,
'H2': 2.0000000000000000e+00,
'H2O': 0.0000000000000000e+00,
'O2': 1.0000000000000000e+00,
'HO2': 0.0000000000000000e+00,
'H2O2': 0.0000000000000000e+00})
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
rates = rxnsys.sort_reaction_rates()
assert numpy.isclose(rates['H'], -1.3403448555187156e+13, atol=1e-16)
assert numpy.isclose(rates['O'], 1.3403448555187156e+13, atol=1e-16)
assert numpy.isclose(rates['OH'], 1.3403448555187156e+13, atol=1e-16)
assert numpy.isclose(rates['H2'], 0.0000000000000000e+00, atol=1e-16)
assert numpy.isclose(rates['H2O'], 0.0000000000000000e+00, atol=1e-16)
assert numpy.isclose(rates['O2'], -1.3403448555187156e+13, atol=1e-16)
assert numpy.isclose(rates['HO2'], 0.0000000000000000e+00, atol=1e-16)
assert numpy.isclose(rates['H2O2'], 0.0000000000000000e+00, atol=1e-16)
# Condition #4
temp = 950.0000000000000000
concentrations = ({'H': 5.0000000000000000e-01,
'O': 1.0000000000000001e-01,
'OH': 1.0000000000000000e-02,
'H2': 2.0000000000000000e+00,
'H2O': 2.5000000000000000e-01,
'O2': 1.0000000000000000e+00,
'HO2': 2.9999999999999999e-01,
'H2O2': 2.0000000000000000e-02})
rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
rates = rxnsys.sort_reaction_rates()
assert numpy.isclose(rates['H'], -2.5701654161377395e+13, atol=1e-16)
assert numpy.isclose(rates['O'], 1.1995070447537494e+13, atol=1e-16)
assert numpy.isclose(rates['OH'], 3.5250102331863312e+13, atol=1e-16)
assert numpy.isclose(rates['H2'], 1.9231756318330654e+12, atol=1e-16)
assert numpy.isclose(rates['H2O'], 3.1178427968785107e+11, atol=1e-16)
assert numpy.isclose(rates['O2'], -1.0092501951521918e+13, atol=1e-16)
assert numpy.isclose(rates['HO2'], -1.3353585162517070e+13, atol=1e-16)
assert numpy.isclose(rates['H2O2'], -3.3239141550534125e+11, atol=1e-16)
def test_rxn_sys_rev_reaction_antioch():
    """Check reversible-reaction rates against Antioch reference values.

    Loads the reversible-reaction XML fixture, pulls thermo coefficients
    from the test database, and compares the per-species reaction rates
    computed by ReactionSystem.sort_reaction_rates() against values
    obtained from the Antioch library at two (T, concentration) states.
    """
    xml_filename = "tests/test_xml_files/rxns_reversible_antioch.xml"
    xml_parser = XMLParser(xml_filename)
    species = xml_parser.get_species()
    sql_parser = SQLParser(TEST_DB_PATH, species)
    thermo_coeffs = sql_parser.get_thermo_coeffs()
    # NOTE(review): the rates are on the order of 1e+11..1e+13, so
    # atol=1e-16 is negligible here — numpy.isclose effectively compares
    # with only its default relative tolerance (rtol=1e-5).
    # Condition #1
    temp = 900.0000000000000000
    concentrations = ({'H': 5.0000000000000000e-01,
                       'O': 0.0000000000000000e+00,
                       'OH': 0.0000000000000000e+00,
                       'H2': 2.0000000000000000e+00,
                       'H2O': 0.0000000000000000e+00,
                       'O2': 1.0000000000000000e+00,
                       'HO2': 0.0000000000000000e+00,
                       'H2O2': 0.0000000000000000e+00})
    rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
    rates = rxnsys.sort_reaction_rates()
    assert numpy.isclose(rates['H'], -1.2191202810057324e+13, atol=1e-16)
    assert numpy.isclose(rates['O'], 1.2191204233198564e+13, atol=1e-16)
    assert numpy.isclose(rates['OH'], 1.2191204233198564e+13, atol=1e-16)
    assert numpy.isclose(rates['H2'], -1.4231412404922757e+06, atol=1e-16)
    assert numpy.isclose(rates['H2O'], 0.0000000000000000e+00, atol=1e-16)
    assert numpy.isclose(rates['O2'], -1.2191205656339805e+13, atol=1e-16)
    assert numpy.isclose(rates['HO2'], 1.4231412404922757e+06, atol=1e-16)
    assert numpy.isclose(rates['H2O2'], 0.0000000000000000e+00, atol=1e-16)
    # Condition #2
    temp = 2500.0000000000000000
    concentrations = ({'H': 5.0000000000000000e-01,
                       'O': 0.0000000000000000e+00,
                       'OH': 0.0000000000000000e+00,
                       'H2': 2.0000000000000000e+00,
                       'H2O': 0.0000000000000000e+00,
                       'O2': 1.0000000000000000e+00,
                       'HO2': 0.0000000000000000e+00,
                       'H2O2': 0.0000000000000000e+00})
    rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
    rates = rxnsys.sort_reaction_rates()
    assert numpy.isclose(rates['H'], -3.3100422956557074e+13, atol=1e-16)
    assert numpy.isclose(rates['O'], 3.3300992971255586e+13, atol=1e-16)
    assert numpy.isclose(rates['OH'], 3.3300992971255586e+13, atol=1e-16)
    assert numpy.isclose(rates['H2'], -2.0057001469851071e+11, atol=1e-16)
    assert numpy.isclose(rates['H2O'], 0.0000000000000000e+00, atol=1e-16)
    assert numpy.isclose(rates['O2'], -3.3501562985954098e+13, atol=1e-16)
    assert numpy.isclose(rates['HO2'], 2.0057001469851071e+11, atol=1e-16)
    assert numpy.isclose(rates['H2O2'], 0.0000000000000000e+00, atol=1e-16)
    # NOTE(review): Conditions #3 and #4 below are disabled — presumably
    # the reference values no longer match the current implementation;
    # confirm against Antioch before re-enabling or deleting.
    # # Condition #3
    # temp = 950.0000000000000000
    # concentrations = ({'H': 5.0000000000000000e-01,
    # 'O': 0.0000000000000000e+00,
    # 'OH': 0.0000000000000000e+00,
    # 'H2': 2.0000000000000000e+00,
    # 'H2O': 0.0000000000000000e+00,
    # 'O2': 1.0000000000000000e+00,
    # 'HO2': 0.0000000000000000e+00,
    # 'H2O2': 0.0000000000000000e+00})
    # rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
    # rates = rxnsys.sort_reaction_rates()
    # assert numpy.isclose(rates['H'], -1.3403448555170947e+13, atol=1e-16)
    # assert numpy.isclose(rates['O'], 1.3403448555187156e+13, atol=1e-16)
    # assert numpy.isclose(rates['OH'], 1.3403448555187156e+13, atol=1e-16)
    # assert numpy.isclose(rates['H2'], -1.6208366095320475e+01, atol=1e-16)
    # assert numpy.isclose(rates['H2O'], 0.0000000000000000e+00, atol=1e-16)
    # assert numpy.isclose(rates['O2'], -1.3403448555203365e+13, atol=1e-16)
    # assert numpy.isclose(rates['HO2'], 1.6208366095320475e+01, atol=1e-16)
    # assert numpy.isclose(rates['H2O2'], 0.0000000000000000e+00, atol=1e-16)
    # # Condition #4
    # temp = 950.0000000000000000
    # concentrations = ({'H': 5.0000000000000000e-01,
    # 'O': 1.0000000000000001e-01,
    # 'OH': 1.0000000000000000e-02,
    # 'H2': 2.0000000000000000e+00,
    # 'H2O': 2.5000000000000000e-01,
    # 'O2': 1.0000000000000000e+00,
    # 'HO2': 2.9999999999999999e-01,
    # 'H2O2': 2.0000000000000000e-02})
    # rxnsys = ReactionSystem(xml_parser.reaction_list, thermo_coeffs, temp, concentrations)
    # rates = rxnsys.sort_reaction_rates()
    # assert numpy.isclose(rates['H'], -1.7750736729894043e+13, atol=1e-16)
    # assert numpy.isclose(rates['O'], 4.0714901298639282e+12, atol=1e-16)
    # assert numpy.isclose(rates['OH'], 2.7224152308533266e+13, atol=1e-16)
    # assert numpy.isclose(rates['H2'], 1.9335776122987725e+12, atol=1e-16)
    # assert numpy.isclose(rates['H2O'], 3.3867579679335474e+11, atol=1e-16)
    # assert numpy.isclose(rates['O2'], -2.1311825395902986e+12, atol=1e-16)
    # assert numpy.isclose(rates['HO2'], -1.3354030759186477e+13, atol=1e-16)
    # assert numpy.isclose(rates['H2O2'], -3.3194581881849854e+11, atol=1e-16)
| 51.181319
| 113
| 0.61825
| 2,230
| 18,630
| 5.031839
| 0.103139
| 0.071562
| 0.117102
| 0.131183
| 0.83923
| 0.820515
| 0.789056
| 0.778095
| 0.761964
| 0.726138
| 0
| 0.263301
| 0.248363
| 18,630
| 363
| 114
| 51.322314
| 0.538028
| 0.173108
| 0
| 0.606178
| 0
| 0
| 0.039196
| 0.01649
| 0
| 0
| 0
| 0
| 0.254826
| 1
| 0.03861
| false
| 0
| 0.034749
| 0
| 0.084942
| 0.003861
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e81738081d7d574b8390d9ae21fd569c4cd82f83
| 197
|
py
|
Python
|
examples/aspectl/advices.py
|
duelle/kieker-lang-pack-python
|
563703e8c3f036f6d23b4828135b8c0da31983d7
|
[
"Apache-2.0"
] | 2
|
2021-11-30T14:55:57.000Z
|
2021-12-26T23:05:16.000Z
|
examples/aspectl/advices.py
|
duelle/kieker-lang-pack-python
|
563703e8c3f036f6d23b4828135b8c0da31983d7
|
[
"Apache-2.0"
] | 4
|
2020-04-27T09:02:18.000Z
|
2021-06-02T13:41:50.000Z
|
examples/aspectl/advices.py
|
duelle/kieker-lang-pack-python
|
563703e8c3f036f6d23b4828135b8c0da31983d7
|
[
"Apache-2.0"
] | 2
|
2021-06-17T15:55:06.000Z
|
2021-11-30T12:15:21.000Z
|
# -*- coding: utf-8 -*-
"""Weave the instrumentation advice onto the Bookstore example class."""
import examples.aspectl.instrument as inst
import aspectlib
import examples.aspectl.bookstore

# Attach the monitoring wrapper to every method of the example class.
target_class = examples.aspectl.bookstore.Bookstore
aspectlib.weave(target_class, inst.wrapper)
print(2)
| 24.625
| 67
| 0.791878
| 25
| 197
| 6.24
| 0.6
| 0.288462
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011173
| 0.091371
| 197
| 8
| 68
| 24.625
| 0.860335
| 0.106599
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e82dad7ad3c02e6961e96351946dc62a16af205c
| 244
|
py
|
Python
|
test/test_datahandling.py
|
Undo1/Smokey-McSmokeface
|
b1d7e3f86941ef200a194ee8e362c30499ee6a5e
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
test/test_datahandling.py
|
Undo1/Smokey-McSmokeface
|
b1d7e3f86941ef200a194ee8e362c30499ee6a5e
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
test/test_datahandling.py
|
Undo1/Smokey-McSmokeface
|
b1d7e3f86941ef200a194ee8e362c30499ee6a5e
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from datahandling import append_pings
def test_append_pings():
    """append_pings() appends @-pings, stripping spaces from user names."""
    plain = append_pings("foo", ["user1", "some user"])
    assert plain == "foo (@user1 @someuser)"
    # Non-ASCII names survive the same treatment.
    unicode_name = append_pings("foo", [u"Doorknob 冰"])
    assert unicode_name == u"foo (@Doorknob冰)"
| 27.111111
| 82
| 0.647541
| 32
| 244
| 4.78125
| 0.625
| 0.287582
| 0.222222
| 0.261438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014563
| 0.155738
| 244
| 8
| 83
| 30.5
| 0.728155
| 0.086066
| 0
| 0
| 0
| 0
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e83708804bad03ade8741dcd108d6801e1d610c9
| 3,208
|
py
|
Python
|
tests/unit_tests/test_range_switch.py
|
hobbe/teslajsonpy
|
1d185a13ddf8024d74bd7a6bec5d798ca0270f61
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_tests/test_range_switch.py
|
hobbe/teslajsonpy
|
1d185a13ddf8024d74bd7a6bec5d798ca0270f61
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_tests/test_range_switch.py
|
hobbe/teslajsonpy
|
1d185a13ddf8024d74bd7a6bec5d798ca0270f61
|
[
"Apache-2.0"
] | null | null | null |
"""Test range switch."""
import pytest
from tests.tesla_mock import TeslaMock
from teslajsonpy.controller import Controller
from teslajsonpy.charger import RangeSwitch
def test_has_battery(monkeypatch):
    """A RangeSwitch reports that it has no battery of its own."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    switch = RangeSwitch(vehicle_data, Controller(None))
    assert not switch.has_battery()
def test_is_maxrange_on_init(monkeypatch):
    """is_maxrange() is False on a freshly constructed switch."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    switch = RangeSwitch(vehicle_data, Controller(None))
    assert not switch.is_maxrange()
@pytest.mark.asyncio
async def test_is_maxrange_on(monkeypatch):
    """is_maxrange() is True after updating with the max-range flag set."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    vehicle_data["charge_state"]["charge_to_max_range"] = True
    switch = RangeSwitch(vehicle_data, Controller(None))
    await switch.async_update()
    assert switch.is_maxrange()
@pytest.mark.asyncio
async def test_is_maxrange_off(monkeypatch):
    """is_maxrange() is False after updating with the max-range flag unset."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    vehicle_data["charge_state"]["charge_to_max_range"] = False
    switch = RangeSwitch(vehicle_data, Controller(None))
    await switch.async_update()
    assert not switch.is_maxrange()
@pytest.mark.asyncio
async def test_set_max(monkeypatch):
    """set_max() switches max-range charging on."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    vehicle_data["charge_state"]["charge_to_max_range"] = False
    switch = RangeSwitch(vehicle_data, Controller(None))
    await switch.async_update()
    await switch.set_max()
    assert switch.is_maxrange()
@pytest.mark.asyncio
async def test_set_standard(monkeypatch):
    """set_standard() switches max-range charging off."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    vehicle_data["charge_state"]["charge_to_max_range"] = True
    switch = RangeSwitch(vehicle_data, Controller(None))
    await switch.async_update()
    await switch.set_standard()
    assert not switch.is_maxrange()
@pytest.mark.asyncio
async def test_async_update(monkeypatch):
    """async_update() picks up the charge_to_max_range flag from the data."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    vehicle_data["charge_state"]["charge_to_max_range"] = True
    switch = RangeSwitch(vehicle_data, Controller(None))
    await switch.async_update()
    assert switch.is_maxrange()
@pytest.mark.asyncio
async def test_async_update_with_change(monkeypatch):
    """async_update() reflects data mutated after the switch was built."""
    mock = TeslaMock(monkeypatch)
    vehicle_data = mock.data_request_vehicle()
    vehicle_data["charge_state"]["charge_to_max_range"] = True
    switch = RangeSwitch(vehicle_data, Controller(None))
    # Flip the flag after construction; the update must see the new value.
    vehicle_data["charge_state"]["charge_to_max_range"] = False
    await switch.async_update()
    assert not switch.is_maxrange()
| 25.460317
| 62
| 0.725998
| 374
| 3,208
| 5.772727
| 0.131016
| 0.060213
| 0.08893
| 0.125984
| 0.826308
| 0.816582
| 0.816582
| 0.777675
| 0.761
| 0.739231
| 0
| 0
| 0.165835
| 3,208
| 125
| 63
| 25.664
| 0.806801
| 0.023691
| 0
| 0.794521
| 0
| 0
| 0.075113
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 1
| 0.027397
| false
| 0
| 0.054795
| 0
| 0.082192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c702369789835ad696e217ebc4cee77f5e90329a
| 22,752
|
py
|
Python
|
tests/test_kinesis/test_kinesis.py
|
alexsult/moto
|
ed861ecae1039a048a6350a4ff832ef094cdf2c2
|
[
"Apache-2.0"
] | 2
|
2019-07-10T14:44:12.000Z
|
2020-06-08T17:26:29.000Z
|
tests/test_kinesis/test_kinesis.py
|
alexsult/moto
|
ed861ecae1039a048a6350a4ff832ef094cdf2c2
|
[
"Apache-2.0"
] | 5
|
2018-04-25T21:04:20.000Z
|
2018-11-02T19:59:27.000Z
|
tests/test_kinesis/test_kinesis.py
|
alexsult/moto
|
ed861ecae1039a048a6350a4ff832ef094cdf2c2
|
[
"Apache-2.0"
] | 2
|
2020-07-24T18:14:07.000Z
|
2020-12-10T10:55:26.000Z
|
from __future__ import unicode_literals
import boto.kinesis
from boto.kinesis.exceptions import ResourceNotFoundException, InvalidArgumentException
import boto3
import sure # noqa
import datetime
import time
from moto import mock_kinesis, mock_kinesis_deprecated
@mock_kinesis_deprecated
def test_create_cluster():
    """Creating a stream yields an ACTIVE description with two shards."""
    client = boto.kinesis.connect_to_region("us-west-2")
    client.create_stream("my_stream", 2)
    description = client.describe_stream("my_stream")["StreamDescription"]
    description["StreamName"].should.equal("my_stream")
    description["HasMoreShards"].should.equal(False)
    description["StreamARN"].should.equal(
        "arn:aws:kinesis:us-west-2:123456789012:my_stream")
    description["StreamStatus"].should.equal("ACTIVE")
    description['Shards'].should.have.length_of(2)
@mock_kinesis_deprecated
def test_describe_non_existant_stream():
    """describe_stream() on an unknown name raises ResourceNotFoundException."""
    client = boto.kinesis.connect_to_region("us-east-1")
    bad_call = client.describe_stream.when.called_with("not-a-stream")
    bad_call.should.throw(ResourceNotFoundException)
@mock_kinesis_deprecated
def test_list_and_delete_stream():
    """Streams are listable and deletable; deleting an unknown id raises."""
    client = boto.kinesis.connect_to_region("us-west-2")
    for name in ("stream1", "stream2"):
        client.create_stream(name, 1)
    client.list_streams()['StreamNames'].should.have.length_of(2)
    client.delete_stream("stream2")
    client.list_streams()['StreamNames'].should.have.length_of(1)
    # Deleting an id that was never created is an error.
    client.delete_stream.when.called_with(
        "not-a-stream").should.throw(ResourceNotFoundException)
@mock_kinesis
def test_list_many_streams():
    """list_streams() pages at 10 names and honors ExclusiveStartStreamName."""
    client = boto3.client('kinesis', region_name="us-west-2")
    for i in range(11):
        client.create_stream(StreamName="stream%d" % i, ShardCount=1)
    first_page = client.list_streams()
    first_page["StreamNames"].should.have.length_of(10)
    first_page["HasMoreStreams"].should.be(True)
    # Resume from the last name of the first page to get the remainder.
    second_page = client.list_streams(
        ExclusiveStartStreamName=first_page["StreamNames"][-1])
    second_page["StreamNames"].should.have.length_of(1)
    second_page["HasMoreStreams"].should.equal(False)
@mock_kinesis_deprecated
def test_basic_shard_iterator():
    """A TRIM_HORIZON iterator on an empty stream returns no records."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    description = client.describe_stream(stream_name)['StreamDescription']
    shard_id = description['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'TRIM_HORIZON')['ShardIterator']
    result = client.get_records(iterator)
    # A follow-up iterator is always provided, even for an empty read.
    next_iterator = result['NextShardIterator']
    result['Records'].should.equal([])
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis_deprecated
def test_get_invalid_shard_iterator():
    """get_shard_iterator() with a bogus shard id raises ResourceNotFoundException."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    bad_call = client.get_shard_iterator.when.called_with(
        stream_name, "123", 'TRIM_HORIZON')
    bad_call.should.throw(ResourceNotFoundException)
@mock_kinesis_deprecated
def test_put_records():
    """put_record() rejects non-string partition keys and stores records."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    data = "hello world"
    partition_key = "1234"
    # A non-string partition key is rejected outright.
    client.put_record.when.called_with(
        stream_name, data, 1234).should.throw(InvalidArgumentException)
    client.put_record(stream_name, data, partition_key)
    shard_id = client.describe_stream(
        stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'TRIM_HORIZON')['ShardIterator']
    result = client.get_records(iterator)
    next_iterator = result['NextShardIterator']
    result['Records'].should.have.length_of(1)
    record = result['Records'][0]
    record["Data"].should.equal("hello world")
    record["PartitionKey"].should.equal("1234")
    record["SequenceNumber"].should.equal("1")
@mock_kinesis_deprecated
def test_get_records_limit():
    """get_records(limit=...) caps a batch; the next iterator yields the rest."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    # Seed five records sharing the same payload.
    data = "hello world"
    for index in range(5):
        client.put_record(stream_name, data, str(index))
    shard_id = client.describe_stream(
        stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'TRIM_HORIZON')['ShardIterator']
    # The first batch is capped at three records.
    first_batch = client.get_records(iterator, limit=3)
    first_batch['Records'].should.have.length_of(3)
    # The follow-up iterator returns the remaining two.
    second_batch = client.get_records(first_batch['NextShardIterator'])
    second_batch['Records'].should.have.length_of(2)
@mock_kinesis_deprecated
def test_get_records_at_sequence_number():
    """AT_SEQUENCE_NUMBER starts reading exactly at the given sequence number."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    # Seed records "1".."4".
    for index in range(1, 5):
        client.put_record(stream_name, str(index), str(index))
    shard_id = client.describe_stream(
        stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'TRIM_HORIZON')['ShardIterator']
    # Read two records and remember the second one's sequence number.
    first_batch = client.get_records(iterator, limit=2)
    second_sequence_id = first_batch['Records'][1]['SequenceNumber']
    # An iterator positioned AT that sequence number...
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'AT_SEQUENCE_NUMBER',
        second_sequence_id)['ShardIterator']
    result = client.get_records(iterator)
    # ...starts with the second record itself.
    result['Records'][0]['SequenceNumber'].should.equal(second_sequence_id)
    result['Records'][0]['Data'].should.equal('2')
@mock_kinesis_deprecated
def test_get_records_after_sequence_number():
    """AFTER_SEQUENCE_NUMBER starts reading just past the given sequence number."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    # Seed records "1".."4".
    for index in range(1, 5):
        client.put_record(stream_name, str(index), str(index))
    shard_id = client.describe_stream(
        stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'TRIM_HORIZON')['ShardIterator']
    # Read two records and remember the second one's sequence number.
    first_batch = client.get_records(iterator, limit=2)
    second_sequence_id = first_batch['Records'][1]['SequenceNumber']
    # An iterator positioned AFTER that sequence number...
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'AFTER_SEQUENCE_NUMBER',
        second_sequence_id)['ShardIterator']
    result = client.get_records(iterator)
    # ...starts with the third record.
    result['Records'][0]['Data'].should.equal('3')
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis_deprecated
def test_get_records_latest():
    """LATEST reads only records written after the iterator was created."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    # Seed records "1".."4".
    for index in range(1, 5):
        client.put_record(stream_name, str(index), str(index))
    shard_id = client.describe_stream(
        stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'TRIM_HORIZON')['ShardIterator']
    first_batch = client.get_records(iterator, limit=2)
    second_sequence_id = first_batch['Records'][1]['SequenceNumber']
    # A LATEST iterator ignores everything already in the shard.
    iterator = client.get_shard_iterator(
        stream_name, shard_id, 'LATEST', second_sequence_id)['ShardIterator']
    # One more record arrives after the iterator exists.
    client.put_record(stream_name, "last_record", "last_record")
    result = client.get_records(iterator)
    # Only the new record is returned.
    result['Records'].should.have.length_of(1)
    result['Records'][0]['PartitionKey'].should.equal('last_record')
    result['Records'][0]['Data'].should.equal('last_record')
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis
def test_get_records_at_timestamp():
    """AT_TIMESTAMP returns the first record at or after the given timestamp."""
    client = boto3.client('kinesis', region_name="us-west-2")
    stream_name = "my_stream"
    client.create_stream(StreamName=stream_name, ShardCount=1)
    # Records written before the captured timestamp.
    for index in range(1, 5):
        client.put_record(StreamName=stream_name,
                          Data=str(index),
                          PartitionKey=str(index))
    # boto3 floors the timestamp passed to get_shard_iterator to second
    # precision even though AWS supports ms precision:
    # http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html
    # To work around that, wait until well into the next second before
    # capturing the time and writing the records we expect back.
    time.sleep(1.0)
    timestamp = datetime.datetime.utcnow()
    keys = [str(i) for i in range(5, 10)]
    for k in keys:
        client.put_record(StreamName=stream_name,
                          Data=k,
                          PartitionKey=k)
    shard_id = client.describe_stream(
        StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(StreamName=stream_name,
                                         ShardId=shard_id,
                                         ShardIteratorType='AT_TIMESTAMP',
                                         Timestamp=timestamp)['ShardIterator']
    result = client.get_records(ShardIterator=iterator)
    result['Records'].should.have.length_of(len(keys))
    partition_keys = [r['PartitionKey'] for r in result['Records']]
    partition_keys.should.equal(keys)
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis
def test_get_records_at_very_old_timestamp():
    """An AT_TIMESTAMP iterator far in the past returns every record."""
    client = boto3.client('kinesis', region_name="us-west-2")
    stream_name = "my_stream"
    client.create_stream(StreamName=stream_name, ShardCount=1)
    # Seed records "1".."4".
    keys = [str(i) for i in range(1, 5)]
    for k in keys:
        client.put_record(StreamName=stream_name,
                          Data=k,
                          PartitionKey=k)
    shard_id = client.describe_stream(
        StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
    # Timestamp=1 (1970) is older than anything in the stream.
    iterator = client.get_shard_iterator(StreamName=stream_name,
                                         ShardId=shard_id,
                                         ShardIteratorType='AT_TIMESTAMP',
                                         Timestamp=1)['ShardIterator']
    result = client.get_records(ShardIterator=iterator)
    result['Records'].should.have.length_of(len(keys))
    partition_keys = [r['PartitionKey'] for r in result['Records']]
    partition_keys.should.equal(keys)
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis
def test_get_records_timestamp_filtering():
    """Only records written at or after the timestamp are returned."""
    client = boto3.client('kinesis', region_name="us-west-2")
    stream_name = "my_stream"
    client.create_stream(StreamName=stream_name, ShardCount=1)
    client.put_record(StreamName=stream_name,
                      Data='0',
                      PartitionKey='0')
    # Let a full second pass so the captured timestamp excludes record '0'.
    time.sleep(1.0)
    timestamp = datetime.datetime.utcnow()
    client.put_record(StreamName=stream_name,
                      Data='1',
                      PartitionKey='1')
    shard_id = client.describe_stream(
        StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(StreamName=stream_name,
                                         ShardId=shard_id,
                                         ShardIteratorType='AT_TIMESTAMP',
                                         Timestamp=timestamp)['ShardIterator']
    result = client.get_records(ShardIterator=iterator)
    result['Records'].should.have.length_of(1)
    result['Records'][0]['PartitionKey'].should.equal('1')
    result['Records'][0]['ApproximateArrivalTimestamp'].should.be.\
        greater_than(timestamp)
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis
def test_get_records_millis_behind_latest():
    """MillisBehindLatest is positive while newer records remain unread."""
    client = boto3.client('kinesis', region_name="us-west-2")
    stream_name = "my_stream"
    client.create_stream(StreamName=stream_name, ShardCount=1)
    client.put_record(StreamName=stream_name,
                      Data='0',
                      PartitionKey='0')
    # Ensure a measurable time gap between the two records.
    time.sleep(1.0)
    client.put_record(StreamName=stream_name,
                      Data='1',
                      PartitionKey='1')
    shard_id = client.describe_stream(
        StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(
        StreamName=stream_name,
        ShardId=shard_id,
        ShardIteratorType='TRIM_HORIZON')['ShardIterator']
    # Reading only one of the two records leaves us behind the tip.
    result = client.get_records(ShardIterator=iterator, Limit=1)
    result['Records'].should.have.length_of(1)
    result['MillisBehindLatest'].should.be.greater_than(0)
@mock_kinesis
def test_get_records_at_very_new_timestamp():
    """An AT_TIMESTAMP iterator in the future sees no records."""
    client = boto3.client('kinesis', region_name="us-west-2")
    stream_name = "my_stream"
    client.create_stream(StreamName=stream_name, ShardCount=1)
    # Seed records "1".."4".
    keys = [str(i) for i in range(1, 5)]
    for k in keys:
        client.put_record(StreamName=stream_name,
                          Data=k,
                          PartitionKey=k)
    # One second from now: later than every stored record.
    timestamp = datetime.datetime.utcnow() + datetime.timedelta(seconds=1)
    shard_id = client.describe_stream(
        StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(StreamName=stream_name,
                                         ShardId=shard_id,
                                         ShardIteratorType='AT_TIMESTAMP',
                                         Timestamp=timestamp)['ShardIterator']
    result = client.get_records(ShardIterator=iterator)
    result['Records'].should.have.length_of(0)
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis
def test_get_records_from_empty_stream_at_timestamp():
    """AT_TIMESTAMP on an empty stream returns no records."""
    client = boto3.client('kinesis', region_name="us-west-2")
    stream_name = "my_stream"
    client.create_stream(StreamName=stream_name, ShardCount=1)
    timestamp = datetime.datetime.utcnow()
    shard_id = client.describe_stream(
        StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
    iterator = client.get_shard_iterator(StreamName=stream_name,
                                         ShardId=shard_id,
                                         ShardIteratorType='AT_TIMESTAMP',
                                         Timestamp=timestamp)['ShardIterator']
    result = client.get_records(ShardIterator=iterator)
    result['Records'].should.have.length_of(0)
    result['MillisBehindLatest'].should.equal(0)
@mock_kinesis_deprecated
def test_invalid_shard_iterator_type():
    """An unknown iterator type raises InvalidArgumentException."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    shard_id = client.describe_stream(
        stream_name)['StreamDescription']['Shards'][0]['ShardId']
    client.get_shard_iterator.when.called_with(
        stream_name, shard_id, 'invalid-type').should.throw(InvalidArgumentException)
@mock_kinesis_deprecated
def test_add_tags():
    """Tags can be added and overwritten without error."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    client.describe_stream(stream_name)
    # Two fresh tags followed by two overwrites of the same keys.
    for tags in ({'tag1': 'val1'}, {'tag2': 'val2'},
                 {'tag1': 'val3'}, {'tag2': 'val4'}):
        client.add_tags_to_stream(stream_name, tags)
@mock_kinesis_deprecated
def test_list_tags():
    """Tags added to a stream are visible via list_tags_for_stream()."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    client.describe_stream(stream_name)

    def current_tags():
        # Read the stream's tags back as a plain dict.
        return dict([(tag['Key'], tag['Value'])
                     for tag in client.list_tags_for_stream(stream_name)['Tags']])

    client.add_tags_to_stream(stream_name, {'tag1': 'val1'})
    current_tags().get('tag1').should.equal('val1')
    client.add_tags_to_stream(stream_name, {'tag2': 'val2'})
    current_tags().get('tag2').should.equal('val2')
    # Re-adding an existing key overwrites its value.
    client.add_tags_to_stream(stream_name, {'tag1': 'val3'})
    current_tags().get('tag1').should.equal('val3')
    client.add_tags_to_stream(stream_name, {'tag2': 'val4'})
    current_tags().get('tag2').should.equal('val4')
@mock_kinesis_deprecated
def test_remove_tags():
    """Removed tags no longer appear in list_tags_for_stream()."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = "my_stream"
    client.create_stream(stream_name, 1)
    client.describe_stream(stream_name)

    def current_tags():
        # Read the stream's tags back as a plain dict.
        return dict([(tag['Key'], tag['Value'])
                     for tag in client.list_tags_for_stream(stream_name)['Tags']])

    client.add_tags_to_stream(stream_name, {'tag1': 'val1'})
    current_tags().get('tag1').should.equal('val1')
    client.remove_tags_from_stream(stream_name, ['tag1'])
    current_tags().get('tag1').should.equal(None)
    client.add_tags_to_stream(stream_name, {'tag2': 'val2'})
    current_tags().get('tag2').should.equal('val2')
    client.remove_tags_from_stream(stream_name, ['tag2'])
    current_tags().get('tag2').should.equal(None)
@mock_kinesis_deprecated
def test_split_shard():
    """Splitting a shard raises the shard count while keeping all records."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = 'my_stream'
    client.create_stream(stream_name, 2)
    # Seed the stream with 99 records.
    for index in range(1, 100):
        client.put_record(stream_name, str(index), str(index))

    def checked_shards(expected_count):
        # Fetch the shard list, asserting count and total record count.
        shards = client.describe_stream(
            stream_name)["StreamDescription"]['Shards']
        shards.should.have.length_of(expected_count)
        sum([shard['SequenceNumberRange']['EndingSequenceNumber']
             for shard in shards]).should.equal(99)
        return shards

    shards = checked_shards(2)
    # Split the first shard at the midpoint of its hash-key range.
    hash_range = shards[0]['HashKeyRange']
    midpoint = (
        int(hash_range['EndingHashKey']) + int(hash_range['StartingHashKey'])) // 2
    client.split_shard("my_stream", shards[0]['ShardId'], str(midpoint))
    shards = checked_shards(3)
    # Split the third shard the same way.
    hash_range = shards[2]['HashKeyRange']
    midpoint = (
        int(hash_range['EndingHashKey']) + int(hash_range['StartingHashKey'])) // 2
    client.split_shard("my_stream", shards[2]['ShardId'], str(midpoint))
    checked_shards(4)
@mock_kinesis_deprecated
def test_merge_shards():
    """Merging adjacent shards lowers the count; non-adjacent merges fail."""
    client = boto.kinesis.connect_to_region("us-west-2")
    stream_name = 'my_stream'
    client.create_stream(stream_name, 4)
    # Seed the stream with 99 records.
    for index in range(1, 100):
        client.put_record(stream_name, str(index), str(index))

    def current_shards():
        return client.describe_stream(
            stream_name)["StreamDescription"]['Shards']

    def check_shards(expected_count):
        # Shard count changes; the total record count stays at 99.
        shards = current_shards()
        shards.should.have.length_of(expected_count)
        sum([shard['SequenceNumberRange']['EndingSequenceNumber']
             for shard in shards]).should.equal(99)

    current_shards().should.have.length_of(4)
    # Merging non-adjacent shards is rejected and changes nothing.
    client.merge_shards.when.called_with(
        stream_name, 'shardId-000000000000', 'shardId-000000000002').should.throw(InvalidArgumentException)
    check_shards(4)
    client.merge_shards(stream_name, 'shardId-000000000000',
                        'shardId-000000000001')
    check_shards(3)
    client.merge_shards(stream_name, 'shardId-000000000002',
                        'shardId-000000000000')
    check_shards(2)
| 36.4032
| 107
| 0.688203
| 2,815
| 22,752
| 5.31865
| 0.082771
| 0.078814
| 0.053433
| 0.026115
| 0.838632
| 0.809177
| 0.792546
| 0.769637
| 0.739848
| 0.72876
| 0
| 0.017503
| 0.191412
| 22,752
| 624
| 108
| 36.461538
| 0.796325
| 0.067994
| 0
| 0.732719
| 0
| 0
| 0.142668
| 0.004537
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052995
| false
| 0
| 0.018433
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c706b09be6481c2eee1119f60c49780dee8a7769
| 11,640
|
py
|
Python
|
src/pyABFauto/analyses/stimulation.py
|
swharden/pyABFauto
|
d353d065aa3bfcef4f4a36fdfed440a6b6f0be9a
|
[
"MIT"
] | 8
|
2018-11-19T13:15:48.000Z
|
2022-02-20T22:35:52.000Z
|
src/pyABFauto/analyses/stimulation.py
|
swharden/pyABFauto
|
d353d065aa3bfcef4f4a36fdfed440a6b6f0be9a
|
[
"MIT"
] | 7
|
2018-11-17T13:39:52.000Z
|
2020-02-18T23:22:05.000Z
|
src/pyABFauto/analyses/stimulation.py
|
swharden/pyABFauto
|
d353d065aa3bfcef4f4a36fdfed440a6b6f0be9a
|
[
"MIT"
] | 1
|
2019-12-20T05:15:23.000Z
|
2019-12-20T05:15:23.000Z
|
"""
????????????????????????????
"""
import pyabf
import pyabf.tools
import pyabf.tools.memtest
import pyabf.filter
import pyABFauto
import matplotlib.pyplot as plt
import numpy as np
def optoResponse(abf, fig, optoEpochNumber=3):
    """Dispatch to the appropriate optogenetic-response figure for *abf*.

    Long current recordings (sweep units "pA" and more than 5 minutes of
    data) get the over-time summary figure; everything else gets the
    single-figure opto-response test plot.

    Parameters:
        abf: the recording to analyze (pyabf.ABF)
        fig: the pyABFauto figure wrapper to draw into
        optoEpochNumber: index of the stimulus epoch in the epoch table

    Fix: removed a dead trailing ``pass`` statement left after the
    if/else (it was unreachable dead code with no effect).
    """
    if abf.sweepUnitsY == "pA" and abf.dataLengthMin > 5:
        figureShowOptoResponseOverTime(abf, fig, optoEpochNumber)
    else:
        figureTestOptoResponse(abf, fig, optoEpochNumber)
def getMeanSweep(abf, baseline=None):
    """Return the point-wise mean of every sweep in *abf*.

    Each sweep is loaded with the given *baseline* (passed straight to
    ``abf.setSweep``) before being accumulated, so the returned array is
    the baseline-subtracted average trace.
    """
    assert isinstance(abf, pyabf.ABF)
    accumulated = np.zeros(len(abf.sweepY))
    for sweepIndex in abf.sweepList:
        abf.setSweep(sweepIndex, baseline=baseline)
        accumulated = accumulated + abf.sweepY
    return accumulated / abf.sweepCount
def figureTestOptoResponse(abf, fig, optoEpochNumber=3):
    """Plot the optogenetic response: stacked sweeps (top panel) and the
    average sweep (bottom panel), windowed around the stimulus epoch.

    Parameters:
        abf: recording to plot (pyabf.ABF)
        fig: pyABFauto figure wrapper (validated but not drawn into directly;
             all drawing goes through pyplot's current figure)
        optoEpochNumber: index of the stimulus epoch in the epoch table
    """
    assert isinstance(abf, pyabf.ABF)
    assert isinstance(fig, pyABFauto.figure.Figure)

    # Stimulus on/off boundaries, in points and in seconds.
    optoPointOn = abf.sweepEpochs.p1s[optoEpochNumber]
    optoPointOff = abf.sweepEpochs.p2s[optoEpochNumber]
    optoTimeOn = optoPointOn * abf.dataSecPerPoint
    optoTimeOff = optoPointOff * abf.dataSecPerPoint
    optoDuration = optoTimeOff - optoTimeOn

    # Display window: stimulus epoch plus 200 ms of padding on each side.
    dataPadSec = 0.2
    dataPadPoints = int(dataPadSec * abf.dataRate)
    displayPoint1 = int(optoPointOn - dataPadPoints)
    displayPoint2 = int(optoPointOff + dataPadPoints)

    plt.title("Optogenetic Response (%d sweeps)" % abf.sweepCount)
    # Baseline-subtract each sweep using the pad window just before onset.
    baseline = [optoTimeOn - dataPadSec, optoTimeOn]

    # Top panel: every sweep, vertically offset so they stack.
    plt.subplot(211)
    plt.title("Stacked Sweeps")
    yOffset = 100  # vertical spacing between stacked sweeps
    for sweepNumber in abf.sweepList:
        abf.setSweep(sweepNumber, baseline=baseline)
        plt.plot(abf.sweepX[displayPoint1:displayPoint2],
                 abf.sweepY[displayPoint1:displayPoint2] + sweepNumber*yOffset,
                 color='b')
    plt.ylabel(abf.sweepLabelY)
    plt.xlabel(abf.sweepLabelX)
    plt.margins(0, .1)
    # Shade the stimulus window.
    plt.axvspan(optoTimeOn, optoTimeOff, alpha=.5, color='y', edgecolor='y')

    # Bottom panel: individual sweeps (faint) plus the mean sweep (blue).
    plt.subplot(212)
    optoPeriod = abf.sweepEpochs.pulsePeriods[optoEpochNumber] / abf.dataRate
    # Guard against division by zero when the epoch defines no pulse train.
    if (optoPeriod==0):
        optoPeriod = 1
    optoHz = 1 / optoPeriod
    optoDur = abf.sweepEpochs.pulseWidths[optoEpochNumber] / abf.dataRate * 1000
    plt.title(f"Average Sweep ({optoHz}Hz of {optoDur}ms pulses)")
    for sweepNumber in abf.sweepList:
        abf.setSweep(sweepNumber, baseline=baseline)
        plt.plot(abf.sweepX[displayPoint1:displayPoint2],
                 abf.sweepY[displayPoint1:displayPoint2],
                 alpha=.2, color='.5')
    meanSweep = getMeanSweep(abf, baseline=baseline)
    plt.plot(abf.sweepX[displayPoint1:displayPoint2],
             meanSweep[displayPoint1:displayPoint2],
             color='b')
    plt.ylabel(abf.sweepLabelY)
    plt.xlabel(abf.sweepLabelX)
    plt.margins(0, .1)
    plt.axvspan(optoTimeOn, optoTimeOff, alpha=.5, color='y', edgecolor='y')
def figureShowOptoResponseOverTime(abf, fig, optoEpochNumber=3):
    """Four-panel summary of an optogenetic experiment over time:
    average sweep, evoked current per sweep, and the memtest Ih and Ra
    trends across the experiment.

    Parameters:
        abf: recording to plot (pyabf.ABF)
        fig: pyABFauto figure wrapper (supplies grid() and addTagLines())
        optoEpochNumber: index of the stimulus epoch in the epoch table
    """
    assert isinstance(abf, pyabf.ABF)
    assert isinstance(fig, pyABFauto.figure.Figure)

    # Membrane-test statistics (Ih, Ra) per sweep.
    mt = pyabf.tools.memtest.Memtest(abf)

    # Stimulus on/off boundaries, in points and in seconds.
    optoPointOn = abf.sweepEpochs.p1s[optoEpochNumber]
    optoPointOff = abf.sweepEpochs.p2s[optoEpochNumber]
    optoTimeOn = optoPointOn * abf.dataSecPerPoint
    optoTimeOff = optoPointOff * abf.dataSecPerPoint

    # Display window: stimulus epoch plus 200 ms of padding on each side.
    dataPadSec = 0.2
    dataPadPoints = int(dataPadSec * abf.dataRate)
    displayPoint1 = int(optoPointOn - dataPadPoints)
    displayPoint2 = int(optoPointOff + dataPadPoints)

    plt.title("Optogenetic Response (%d sweeps)" % abf.sweepCount)
    # Baseline window shortly before onset; measurement window right after.
    baseline = [optoTimeOn - .1, optoTimeOn - .05]
    measure = [optoTimeOn, optoTimeOn + .02]
    measureI1 = int(measure[0] * abf.dataRate)
    measureI2 = int(measure[1] * abf.dataRate)

    # Per-sweep evoked-current means, indexed by sweep number.
    means = np.full(abf.sweepCount, np.nan)
    sweepTimesSec = np.arange(abf.sweepCount) * abf.sweepIntervalSec
    sweepTimesMin = sweepTimesSec / 60

    # Panel 1: all sweeps (faint) plus the mean sweep (blue), with the
    # stimulus, baseline, and measurement windows shaded.
    plt.subplot(221)
    plt.title("Average Sweep")
    for sweepNumber in abf.sweepList:
        abf.setSweep(sweepNumber, baseline=baseline)
        means[sweepNumber] = np.mean(abf.sweepY[measureI1:measureI2])
        plt.plot(abf.sweepX[displayPoint1:displayPoint2],
                 abf.sweepY[displayPoint1:displayPoint2],
                 alpha=.2, color='.5')
    meanSweep = getMeanSweep(abf, baseline=baseline)
    plt.plot(abf.sweepX[displayPoint1:displayPoint2],
             meanSweep[displayPoint1:displayPoint2],
             color='b')
    plt.ylabel(abf.sweepLabelY)
    plt.xlabel(abf.sweepLabelX)
    plt.margins(0, .1)
    plt.axvspan(optoTimeOn, optoTimeOff, alpha=.5, color='y')
    plt.axvspan(baseline[0], baseline[1], alpha=.2, color='k')
    plt.axvspan(measure[0], measure[1], alpha=.2, color='r')

    # Panel 2: evoked current per sweep across the experiment.
    plt.subplot(222)
    fig.grid()
    plt.title("Evoked Current")
    plt.axhline(0, color='k', ls='--')
    plt.plot(sweepTimesMin, means, '.-')
    fig.addTagLines(minutes=True)
    plt.ylabel("Evoked Current (pA)")
    plt.xlabel("Experiment Time (minutes)")
    plt.margins(.1, .3)

    # Panel 3: holding current (Ih) over time.
    plt.subplot(223)
    fig.grid()
    plt.title(mt.Ih.name)
    plt.ylabel(mt.Ih.units)
    plt.xlabel("Experiment Time (minutes)")
    plt.plot(sweepTimesMin, mt.Ih.values, '.-')
    plt.margins(.1, .3)
    fig.addTagLines(minutes=True)

    # Panel 4: access resistance (Ra) over time, clamped to >= 0.
    plt.subplot(224)
    fig.grid()
    plt.title(mt.Ra.name)
    plt.ylabel(mt.Ra.units)
    plt.xlabel("Experiment Time (minutes)")
    plt.plot(sweepTimesMin, mt.Ra.values, '.-')
    fig.addTagLines(minutes=True)
    plt.margins(.1, .3)
    plt.axis([None, None, 0, None])
def figureTestElectricalResponseVC(abf, fig, stimEpochNumber=3):
    """Four-panel summary of an electrically evoked response (voltage
    clamp): sweeps around the stimulus, evoked current per sweep, and the
    memtest Ih and Ra trends across the experiment.

    Unlike the opto variant, the measurement window is placed just AFTER
    the stimulus ends (to skip the stimulus artifact).

    Parameters:
        abf: recording to plot (pyabf.ABF)
        fig: pyABFauto figure wrapper (supplies grid() and addTagLines())
        stimEpochNumber: index of the stimulus epoch in the epoch table
    """
    assert isinstance(abf, pyabf.ABF)
    assert isinstance(fig, pyABFauto.figure.Figure)

    # Membrane-test statistics (Ih, Ra) per sweep.
    mt = pyabf.tools.memtest.Memtest(abf)

    # Stimulus on/off boundaries, in points and in seconds.
    # (NOTE: local names say "opto" but this is the electrical stimulus.)
    optoPointOn = abf.sweepEpochs.p1s[stimEpochNumber]
    optoPointOff = abf.sweepEpochs.p2s[stimEpochNumber]
    optoTimeOn = optoPointOn * abf.dataSecPerPoint
    optoTimeOff = optoPointOff * abf.dataSecPerPoint

    # Display window: 30 ms before onset to 50 ms after offset.
    displayPoint1 = int(optoPointOn - 0.03 * abf.dataRate)
    displayPoint2 = int(optoPointOff + 0.05 * abf.dataRate)

    # Baseline just before onset; measure 3-15 ms after the stimulus ends.
    baseline = [optoTimeOn - .02, optoTimeOn - .01]
    measure = [optoTimeOff + .003, optoTimeOff + .015]
    measureI1 = int(measure[0] * abf.dataRate)
    measureI2 = int(measure[1] * abf.dataRate)

    # Per-sweep evoked-current means, indexed by sweep number.
    means = np.full(abf.sweepCount, np.nan)
    sweepTimesSec = np.arange(abf.sweepCount) * abf.sweepIntervalSec
    sweepTimesMin = sweepTimesSec / 60

    # Panel 1: all sweeps (faint) plus the mean sweep (blue), y-limited
    # to +/-100 so the stimulus artifact does not dominate the axis.
    plt.subplot(221)
    fig.grid()
    plt.title("Electrical Response (%d sweeps)" % abf.sweepCount)
    for sweepNumber in abf.sweepList:
        abf.setSweep(sweepNumber, baseline=baseline)
        means[sweepNumber] = np.mean(abf.sweepY[measureI1:measureI2])
        plt.plot(abf.sweepX[displayPoint1:displayPoint2],
                 abf.sweepY[displayPoint1:displayPoint2],
                 alpha=.2, color='.5')
    meanSweep = getMeanSweep(abf, baseline=baseline)
    plt.plot(abf.sweepX[displayPoint1:displayPoint2],
             meanSweep[displayPoint1:displayPoint2],
             color='b')
    plt.ylabel(abf.sweepLabelY)
    plt.xlabel(abf.sweepLabelX)
    plt.margins(0, .1)
    plt.axvspan(optoTimeOn, optoTimeOff, alpha=.5, color='y')
    plt.axvspan(baseline[0], baseline[1], alpha=.2, color='k')
    plt.axvspan(measure[0], measure[1], alpha=.2, color='r')
    plt.axis([None, None, -100, 100])

    # Panel 2: evoked current per sweep across the experiment.
    plt.subplot(222)
    fig.grid()
    plt.title("Evoked Current")
    plt.axhline(0, color='k', ls='--')
    plt.plot(sweepTimesMin, means, '.-')
    fig.addTagLines(minutes=True)
    plt.ylabel("Evoked Current (pA)")
    plt.xlabel("Experiment Time (minutes)")
    plt.margins(.1, .3)

    # Panel 3: holding current (Ih) over time.
    plt.subplot(223)
    fig.grid()
    plt.title(mt.Ih.name)
    plt.ylabel(mt.Ih.units)
    plt.xlabel("Experiment Time (minutes)")
    plt.plot(sweepTimesMin, mt.Ih.values, '.-')
    plt.margins(.1, .3)
    fig.addTagLines(minutes=True)

    # Panel 4: access resistance (Ra) over time, clamped to >= 0.
    plt.subplot(224)
    fig.grid()
    plt.title(mt.Ra.name)
    plt.ylabel(mt.Ra.units)
    plt.xlabel("Experiment Time (minutes)")
    plt.plot(sweepTimesMin, mt.Ra.values, '.-')
    fig.addTagLines(minutes=True)
    plt.margins(.1, .3)
    plt.axis([None, None, 0, None])
def figureTestElectricalTrainVC(abf, fig, stimEpochNumber=3):
    """Four-panel summary of an electrical stimulus TRAIN (voltage clamp):
    a zoomed view of the first pulse, the full train, and the memtest Ih
    and Ra trends across the experiment.

    Parameters:
        abf: recording to plot (pyabf.ABF)
        fig: pyABFauto figure wrapper (supplies grid() and addTagLines())
        stimEpochNumber: index of the stimulus epoch in the epoch table
    """
    assert isinstance(abf, pyabf.ABF)
    assert isinstance(fig, pyABFauto.figure.Figure)

    # Membrane-test statistics (Ih, Ra) per sweep.
    mt = pyabf.tools.memtest.Memtest(abf)

    # Stimulus on/off boundaries, in points and in seconds.
    # (NOTE: local names say "opto" but this is the electrical stimulus.)
    optoPointOn = abf.sweepEpochs.p1s[stimEpochNumber]
    optoTimeOn = optoPointOn * abf.dataSecPerPoint
    optoPointOff = abf.sweepEpochs.p2s[stimEpochNumber]
    optoTimeOff = optoPointOff * abf.dataSecPerPoint

    # Baseline just before onset; display 30 ms before onset for 300 ms.
    baseline = [optoTimeOn - .02, optoTimeOn - .01]
    displayPoint1 = int(optoPointOn - 0.03 * abf.dataRate)
    displayPoint2 = displayPoint1 + int(0.3 * abf.dataRate)

    sweepTimesSec = np.arange(abf.sweepCount) * abf.sweepIntervalSec
    sweepTimesMin = sweepTimesSec / 60

    # Panel 1: sweeps plus mean, x-limited to 50 ms after onset (zoomed
    # view of the start of the train), y-limited to +/-100.
    plt.subplot(221)
    fig.grid()
    plt.title("Electrical Response (%d sweeps)" % abf.sweepCount)
    for sweepNumber in abf.sweepList:
        abf.setSweep(sweepNumber, baseline=baseline)
        plt.plot(abf.sweepX[displayPoint1:displayPoint2],
                 abf.sweepY[displayPoint1:displayPoint2],
                 alpha=.2, color='.5')
    meanSweep = getMeanSweep(abf, baseline=baseline)
    plt.plot(abf.sweepX[displayPoint1:displayPoint2],
             meanSweep[displayPoint1:displayPoint2],
             color='b')
    plt.ylabel(abf.sweepLabelY)
    plt.xlabel(abf.sweepLabelX)
    plt.margins(0, .1)
    plt.axvspan(optoTimeOn, optoTimeOff, alpha=.5, color='y')
    plt.axvspan(baseline[0], baseline[1], alpha=.2, color='k')
    plt.axis([None, optoTimeOn + .05, -100, 100])

    # Panel 2: same data, full display window (the whole train).
    plt.subplot(222)
    fig.grid()
    plt.title("Electrical Response (%d sweeps)" % abf.sweepCount)
    for sweepNumber in abf.sweepList:
        abf.setSweep(sweepNumber, baseline=baseline)
        plt.plot(abf.sweepX[displayPoint1:displayPoint2],
                 abf.sweepY[displayPoint1:displayPoint2],
                 alpha=.2, color='.5')
    meanSweep = getMeanSweep(abf, baseline=baseline)
    plt.plot(abf.sweepX[displayPoint1:displayPoint2],
             meanSweep[displayPoint1:displayPoint2],
             color='b')
    plt.ylabel(abf.sweepLabelY)
    plt.xlabel(abf.sweepLabelX)
    plt.margins(0, .1)
    plt.axvspan(optoTimeOn, optoTimeOff, alpha=.5, color='y')
    plt.axvspan(baseline[0], baseline[1], alpha=.2, color='k')
    plt.axis([None, None, -100, 100])

    # Panel 3: holding current (Ih) over time.
    plt.subplot(223)
    fig.grid()
    plt.title(mt.Ih.name)
    plt.ylabel(mt.Ih.units)
    plt.xlabel("Experiment Time (minutes)")
    plt.plot(sweepTimesMin, mt.Ih.values, '.-')
    plt.margins(.1, .3)
    fig.addTagLines(minutes=True)

    # Panel 4: access resistance (Ra) over time, clamped to >= 0.
    plt.subplot(224)
    fig.grid()
    plt.title(mt.Ra.name)
    plt.ylabel(mt.Ra.units)
    plt.xlabel("Experiment Time (minutes)")
    plt.plot(sweepTimesMin, mt.Ra.values, '.-')
    fig.addTagLines(minutes=True)
    plt.margins(.1, .3)
    plt.axis([None, None, 0, None])
def figureVariedPulseTime(abf: pyabf.ABF, fig: pyABFauto.figure.Figure):
    """Overlay every sweep (gaussian-filtered, baseline-subtracted) in a
    window from 100 ms before to 200 ms after the stimulus epoch onset."""
    stimEpoch = 3  # hard-coded stimulus epoch index
    stimStartSec = abf.sweepEpochs.p1s[stimEpoch] / abf.sampleRate
    windowStart = int((stimStartSec-.1) * abf.sampleRate)
    windowEnd = int((stimStartSec+.2) * abf.sampleRate)
    plt.axhline(0, color='k', ls='--')
    pyabf.filter.gaussian(abf, 1)
    plt.grid(alpha=.5, ls='--')
    for sweepNumber in range(abf.sweepCount):
        abf.setSweep(sweepNumber, baseline=[stimStartSec-.1, stimStartSec])
        plt.plot(abf.sweepX[windowStart:windowEnd],
                 abf.sweepY[windowStart:windowEnd],
                 alpha=.5, color='b')
    plt.ylabel("Δ Current (pA)")
    plt.xlabel("Time (seconds)")
    plt.margins(0, .1)
| 34.850299
| 80
| 0.669588
| 1,335
| 11,640
| 5.838202
| 0.118352
| 0.07339
| 0.014113
| 0.022581
| 0.803823
| 0.775853
| 0.773415
| 0.773415
| 0.746472
| 0.743264
| 0
| 0.02928
| 0.193127
| 11,640
| 334
| 81
| 34.850299
| 0.800575
| 0.002406
| 0
| 0.797101
| 0
| 0
| 0.050495
| 0
| 0
| 0
| 0
| 0
| 0.032609
| 1
| 0.025362
| false
| 0.003623
| 0.025362
| 0
| 0.054348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c736044901013070efe75d83251b88afce18d2b1
| 74
|
py
|
Python
|
finrl_meta/env_execution_optimizing/order_execution_qlib/trade/observation/__init__.py
|
eitin-infant/FinRL-Meta
|
4c94011e58425796e7e2e5c1bf848afd65c828d6
|
[
"MIT"
] | 214
|
2021-11-08T17:06:11.000Z
|
2022-03-31T18:29:48.000Z
|
finrl_meta/env_execution_optimizing/order_execution_qlib/trade/observation/__init__.py
|
eitin-infant/FinRL-Meta
|
4c94011e58425796e7e2e5c1bf848afd65c828d6
|
[
"MIT"
] | 51
|
2021-11-14T19:11:02.000Z
|
2022-03-30T20:23:08.000Z
|
finrl_meta/env_execution_optimizing/order_execution_qlib/trade/observation/__init__.py
|
eitin-infant/FinRL-Meta
|
4c94011e58425796e7e2e5c1bf848afd65c828d6
|
[
"MIT"
] | 110
|
2021-11-03T07:41:40.000Z
|
2022-03-31T03:23:38.000Z
|
from .ppo_obs import *
from .teacher_obs import *
from .obs_rule import *
| 18.5
| 26
| 0.756757
| 12
| 74
| 4.416667
| 0.5
| 0.339623
| 0.490566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 3
| 27
| 24.666667
| 0.854839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c7409f5d28a4ed10f3a3987c2636ce31eb97e393
| 36,223
|
py
|
Python
|
tests/dhcpv4/kea_only/flexid/test_flex_id.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
tests/dhcpv4/kea_only/flexid/test_flex_id.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
tests/dhcpv4/kea_only/flexid/test_flex_id.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
"""Kea Hook flex-id testing"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_msg
import srv_control
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_libreload():
    """flex-id host reservation still matches after a 'libreload' command.

    A reservation keyed on option 60 ('docsis3.0') maps the client to
    192.168.50.10.  After reloading the hook libraries over the control
    socket, the same DISCOVER must still be offered the reserved address.
    """
    misc.test_setup()
    # Reserve 192.168.50.10 for clients whose option[60] hex equals 'docsis3.0'.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.50')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.host_reservation_in_subnet_add_value('0', '0', 'address', '192.168.50.10')
    srv_control.add_line('"host-reservation-identifiers": [ "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.open_control_channel()
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # A matching client is offered the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # Reload the hook libraries via the control channel.
    srv_msg.send_ctrl_cmd_via_socket('{"command": "libreload","arguments": {}}')

    # if reload works - classification should work without changes
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_reconfigure():
    """flex-id host reservation still matches after a full reconfigure.

    Same flex-id reservation as the libreload test, but the server is
    reconfigured (config rebuilt and pushed, then 'reconfigured') between
    the two DISCOVER exchanges; both must yield the reserved address.
    """
    misc.test_setup()
    # Reserve 192.168.50.10 for clients whose option[60] hex equals 'docsis3.0'.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.50')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.host_reservation_in_subnet_add_value('0', '0', 'address', '192.168.50.10')
    srv_control.add_line('"host-reservation-identifiers": [ "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.open_control_channel()
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # A matching client is offered the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # Rebuild the identical configuration and reconfigure the server.
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.50')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.host_reservation_in_subnet_add_value('0', '0', 'address', '192.168.50.10')
    srv_control.add_line('"host-reservation-identifiers": [ "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.open_control_channel()
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'reconfigured')

    # Reservation must still match after the reconfigure.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_inside_pool():
    """Full DORA for a flex-id reservation whose address lies inside the pool.

    The client matching option[60]=='docsis3.0' is offered and then ACKed
    the reserved address 192.168.50.10 (which is within the configured
    dynamic pool .1-.50).
    """
    misc.test_setup()
    # Reserve 192.168.50.10 (inside the pool) keyed on option 60.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.50')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.host_reservation_in_subnet_add_value('0', '0', 'address', '192.168.50.10')
    srv_control.add_line('"host-reservation-identifiers": [ "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # DISCOVER -> OFFER with the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # REQUEST -> ACK confirming the reserved address and subnet mask.
    misc.test_procedure()
    # Client adds to the message vendor_class_id with value docsis3.0.
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_inside_pool_negative():
    """Negative case: REQUEST with a non-matching flex-id is NAKed.

    The DISCOVER matches the 'docsis3.0' reservation and is offered
    192.168.50.10, but the REQUEST carries vendor class 'docsis3.1'
    (different flex-id), so the server must respond with NAK.
    """
    misc.test_setup()
    # Reserve 192.168.50.10 (inside the pool) keyed on option 60.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.50')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.host_reservation_in_subnet_add_value('0', '0', 'address', '192.168.50.10')
    srv_control.add_line('"host-reservation-identifiers": [ "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # DISCOVER with matching flex-id -> OFFER of the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # REQUEST with a DIFFERENT vendor class ('docsis3.1') -> NAK.
    misc.test_procedure()
    # Client adds to the message vendor_class_id with value docsis3.0.
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.1')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'NAK')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_outside_pool():
    """Full DORA for a flex-id reservation whose address lies OUTSIDE the pool.

    The pool is only .1-.9 but the reservation is .10; the matching client
    must still be offered and ACKed 192.168.50.10.
    """
    misc.test_setup()
    # Pool ends at .9; the reserved .10 is outside it.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.9')
    srv_control.host_reservation_in_subnet('hostname',
                                           'reserved-hostname',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.host_reservation_in_subnet_add_value('0', '0', 'address', '192.168.50.10')
    srv_control.add_line('"host-reservation-identifiers": [ "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # DISCOVER -> OFFER with the (out-of-pool) reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # REQUEST -> ACK confirming the reserved address and subnet mask.
    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_replace_mac_addr_inside_pool():
    """DORA + RELEASE with 'replace-client-id' enabled but client-id
    matching disabled.

    With match-client-id=false the lease file records the hardware address
    with an empty client-id column: the lease line ends in ',,4000'
    (active) and ',,0' after the RELEASE.
    """
    misc.test_setup()
    # Reserve 192.168.50.10 keyed on option 60; identify hosts by either
    # hw-address or flex-id.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.50')
    srv_control.host_reservation_in_subnet('address',
                                           '192.168.50.10',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    srv_control.set_conf_parameter_global('match-client-id', 'false')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # server should act normally, mac address should not be replaced
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # REQUEST -> ACK with the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')

    # RELEASE the lease; server does not reply to RELEASE.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_sets_value('Client', 'ciaddr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('RELEASE')

    misc.pass_criteria()
    srv_msg.send_dont_wait_for_message()

    # Lease file shows the active lease (valid lifetime 4000) and the
    # released lease (lifetime 0), both with an empty client-id column.
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,,4000')
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,,0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_replace_client_id_release_fail():
    """With replace-client-id and match-client-id both enabled, a REQUEST
    that omits option 60 must be NAKed.

    The OFFER succeeds (DISCOVER carries option 60), but the follow-up
    REQUEST sends only a client-id without vendor_class_id, so the
    flex-id-derived client-id no longer matches and the server NAKs.
    """
    misc.test_setup()
    # Reserve 192.168.50.10 keyed on option 60; note the small pool (.1-.5).
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    srv_control.host_reservation_in_subnet('address',
                                           '192.168.50.10',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # server should act normally, mac address should not be replaced
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # REQUEST WITHOUT option 60 -> flex-id mismatch -> NAK.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    # Client adds to the message vendor_class_id with value docsis3.0.
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'NAK')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_replace_client_id_release_1():
    """With replace-client-id enabled, a RELEASE that lacks option 60
    (and carries a different client-id) does not release the lease.

    After DORA the lease is recorded with the flex-id-derived client-id
    (00:64:6f:63:73:69:73:33:2e:30 == hex of 'docsis3.0').  The RELEASE
    without option 60 must be ignored: a later DISCOVER still gets the
    address, the lease file never contains the raw RELEASE client-id,
    and the lease line with lifetime 0 (released) must NOT appear.
    """
    misc.test_setup()
    # Reserve 192.168.50.10 keyed on option 60; small pool (.1-.5).
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    srv_control.host_reservation_in_subnet('address',
                                           '192.168.50.10',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # server should act normally, mac address should not be replaced
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # REQUEST (with option 60) -> ACK with the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')

    # RELEASE with a DIFFERENT client-id and no option 60; server does
    # not reply to RELEASE, and this one must not release the lease.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22:33')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_sets_value('Client', 'ciaddr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    # client sends message without option 60
    # Client adds to the message vendor_class_id with value docsis3.0.
    srv_msg.client_send_msg('RELEASE')

    misc.pass_criteria()
    srv_msg.send_dont_wait_for_message()

    # A new DISCOVER (matching option 60) still gets the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22:33:44:55')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')

    # Lease file: the RELEASE's raw client-id never appears; the lease is
    # stored with the flex-id-derived client-id (hex of 'docsis3.0') and
    # remains active (4000), never released (0).
    srv_msg.lease_file_doesnt_contain('ff:01:02:03:ff:04:11:22:33')
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,00:64:6f:63:73:69:73:33:2e:30,4000')
    srv_msg.lease_file_doesnt_contain('192.168.50.10,ff:01:02:03:ff:04,00:64:6f:63:73:69:73:33:2e:30,0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_replace_client_id_release_2():
    """flex-id hook with replace-client-id=true: a RELEASE that carries option 60 releases the reserved lease."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    # Reserve 192.168.50.10 (subnet 0) for flex-id value 'docsis3.0'.
    srv_control.host_reservation_in_subnet('address',
                                           '192.168.50.10',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier) of the incoming packet.
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # server should act normally, mac address should not be replaced
    # DISCOVER -> OFFER: reserved address must be offered.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST -> ACK: lease for the reserved address is granted.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
    # RELEASE with a different client-id but WITH option 60 present, so the
    # flex-id (and therefore the lease) can be matched and released.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22:33')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_sets_value('Client', 'ciaddr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    # RELEASE gets no reply; just send it.
    srv_msg.send_dont_wait_for_message()
    # A third client with yet another client-id but the same vendor class
    # must still be offered the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22:33:44:55')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # Lease file stores the replaced (flex-id derived) client-id, never the raw one.
    srv_msg.lease_file_doesnt_contain('ff:01:02:03:ff:04:11:22:33')
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,00:64:6f:63:73:69:73:33:2e:30,4000')
    # NOTE(review): the trailing ',0' entry presumably marks the released lease
    # (valid-lifetime 0) — confirm against the memfile lease CSV format.
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,00:64:6f:63:73:69:73:33:2e:30,0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_replace_client_id_renew_1():
    """flex-id hook with replace-client-id=true: a new DISCOVER with a different client-id but the same option 60 re-acquires the reserved lease."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    # Reserve 192.168.50.10 (subnet 0) for flex-id value 'docsis3.0'.
    srv_control.host_reservation_in_subnet('address',
                                           '192.168.50.10',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier).
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # server should act normally, mac address should not be replaced
    # DISCOVER -> OFFER: reserved address must be offered.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST -> ACK: lease for the reserved address is granted.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
    # Different client-id, same vendor class: the flex-id still matches,
    # so the same reserved address is offered again.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22:33:44:55')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # Option 61 (client-id) and 54 (server-id) must be echoed in the OFFER.
    srv_msg.response_check_include_option('Response', None, '61')
    srv_msg.response_check_include_option('Response', None, '54')
    # Lease file stores the replaced (flex-id derived) client-id, never the raw one.
    srv_msg.lease_file_doesnt_contain('ff:01:02:03:ff:04:11:22:33')
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,00:64:6f:63:73:69:73:33:2e:30,4000')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_replace_client_id_renew_2():
    """flex-id hook with replace-client-id=true: a DISCOVER WITHOUT option 60 cannot match the reservation and must not get the reserved address."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    # Reserve 192.168.50.10 (subnet 0) for flex-id value 'docsis3.0'.
    srv_control.host_reservation_in_subnet('address',
                                           '192.168.50.10',
                                           '0',
                                           'flex-id',
                                           '\'docsis3.0\'')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier).
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # server should act normally, mac address should not be replaced
    # DISCOVER -> OFFER: reserved address must be offered.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST -> ACK: lease for the reserved address is granted.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
    # Same client-id but the vendor class option is deliberately OMITTED:
    # without option 60 the flex-id cannot be computed, so the reserved
    # address must NOT be offered.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('client_id', 'ff:01:02:03:ff:04:11:22')
    # Client adds to the message vendor_class_id with value docsis3.0.
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', 'NOT ', 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '61')
    srv_msg.response_check_include_option('Response', None, '54')
    # Lease file keeps the replaced (flex-id derived) client-id entry.
    srv_msg.lease_file_doesnt_contain('ff:01:02:03:ff:04:11:22:33')
    srv_msg.lease_file_contains('192.168.50.10,ff:01:02:03:ff:04,00:64:6f:63:73:69:73:33:2e:30,4000')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_mysql_1():
    """flex-id reservation stored in MySQL: client matching the flex-id gets the reserved address via DORA."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier).
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.enable_db_backend_reservation('MySQL')
    # 646f63736973332e30 = docsis3.0
    srv_control.new_db_backend_reservation('MySQL', 'flex-id', '646f63736973332e30')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'MySQL', '1')
    srv_control.update_db_backend_reservation('ipv4_address', '192.168.50.10', 'MySQL', '1')
    srv_control.update_db_backend_reservation('dhcp4_subnet_id', '1', 'MySQL', '1')
    srv_control.upload_db_reservation('MySQL')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Pause the Test.
    # DISCOVER -> OFFER: reserved address must be offered.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST (still carrying option 60) -> ACK for the reserved address.
    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_mysql_negative():
    """flex-id reservation stored in MySQL: a REQUEST WITHOUT option 60 cannot match the reservation and is NAKed."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier).
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.enable_db_backend_reservation('MySQL')
    # 646f63736973332e30 = docsis3.0
    srv_control.new_db_backend_reservation('MySQL', 'flex-id', '646f63736973332e30')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'MySQL', '1')
    srv_control.update_db_backend_reservation('ipv4_address', '192.168.50.10', 'MySQL', '1')
    srv_control.update_db_backend_reservation('dhcp4_subnet_id', '1', 'MySQL', '1')
    srv_control.upload_db_reservation('MySQL')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # DISCOVER with option 60 -> OFFER of the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST WITHOUT option 60: flex-id cannot be computed, so the request
    # for the reserved address must be NAKed.
    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'NAK')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_pgsql_1():
    """flex-id reservation stored in PostgreSQL: client matching the flex-id gets the reserved address via DORA."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier).
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    # '646f63736973332e30' is hex for 'docsis3.0'.
    srv_control.new_db_backend_reservation('PostgreSQL', 'flex-id', '646f63736973332e30')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'PostgreSQL', '1')
    srv_control.update_db_backend_reservation('ipv4_address', '192.168.50.10', 'PostgreSQL', '1')
    srv_control.update_db_backend_reservation('dhcp4_subnet_id', '1', 'PostgreSQL', '1')
    srv_control.upload_db_reservation('PostgreSQL')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Pause the Test.
    # DISCOVER -> OFFER: reserved address must be offered.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST (still carrying option 60) -> ACK for the reserved address.
    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ACK')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_option_content('Response', '1', None, 'value', '255.255.255.0')
@pytest.mark.v4
@pytest.mark.flexid
@pytest.mark.kea_only
def test_v4_hooks_flexid_pgsql_negative():
    """flex-id reservation stored in PostgreSQL: a REQUEST WITHOUT option 60 cannot match the reservation and is NAKed."""
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.5')
    srv_control.add_line('"host-reservation-identifiers": ["hw-address", "flex-id" ]')
    srv_control.add_hooks('libdhcp_flex_id.so')
    # flex-id is derived from option 60 (vendor class identifier).
    srv_control.add_parameter_to_hook('1', 'identifier-expression', 'option[60].hex')
    srv_control.add_parameter_to_hook('1', 'replace-client-id', 'true')
    # enable matching client id
    srv_control.set_conf_parameter_global('match-client-id', 'true')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    # '646f63736973332e30' is hex for 'docsis3.0'.
    srv_control.new_db_backend_reservation('PostgreSQL', 'flex-id', '646f63736973332e30')
    srv_control.update_db_backend_reservation('hostname', 'reserved-hostname', 'PostgreSQL', '1')
    srv_control.update_db_backend_reservation('ipv4_address', '192.168.50.10', 'PostgreSQL', '1')
    srv_control.update_db_backend_reservation('dhcp4_subnet_id', '1', 'PostgreSQL', '1')
    srv_control.upload_db_reservation('PostgreSQL')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Pause the Test.
    # DISCOVER with option 60 -> OFFER of the reserved address.
    misc.test_procedure()
    srv_msg.client_does_include_with_value('vendor_class_id', 'docsis3.0')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'OFFER')
    srv_msg.response_check_content('Response', None, 'yiaddr', '192.168.50.10')
    # REQUEST WITHOUT option 60: flex-id cannot be computed, so the request
    # for the reserved address must be NAKed.
    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'NAK')
| 46.981842
| 104
| 0.689479
| 5,385
| 36,223
| 4.289879
| 0.033426
| 0.06675
| 0.079997
| 0.030302
| 0.987533
| 0.987533
| 0.98697
| 0.98697
| 0.98697
| 0.985022
| 0
| 0.078395
| 0.158711
| 36,223
| 770
| 105
| 47.042857
| 0.679661
| 0.033542
| 0
| 0.95315
| 0
| 0.017771
| 0.272795
| 0.063703
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024233
| true
| 0.059774
| 0.006462
| 0
| 0.030695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
c741b7fb036f13dbe223afaab4080b4db30e60bd
| 29,723
|
py
|
Python
|
pytest_docker_git_fixtures/fixtures.py
|
crashvb/pytest-docker-git-fixtures
|
4fd9ac289ab68c7d036e633125effd1db41304ef
|
[
"Apache-2.0"
] | null | null | null |
pytest_docker_git_fixtures/fixtures.py
|
crashvb/pytest-docker-git-fixtures
|
4fd9ac289ab68c7d036e633125effd1db41304ef
|
[
"Apache-2.0"
] | null | null | null |
pytest_docker_git_fixtures/fixtures.py
|
crashvb/pytest-docker-git-fixtures
|
4fd9ac289ab68c7d036e633125effd1db41304ef
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# pylint: disable=redefined-outer-name,too-many-arguments,too-many-locals
"""The actual fixtures, you found them ;)."""
import logging
import itertools
from base64 import b64encode
from functools import partial
from pathlib import Path
from ssl import create_default_context, SSLContext
from string import Template
from time import sleep, time
from typing import Dict, Generator, List, NamedTuple
import pytest
from lovely.pytest.docker.compose import Services
from _pytest.tmpdir import TempPathFactory
from .utils import (
check_url_secure,
DOCKER_GIT_SERVICE,
DOCKER_GIT_SERVICE_PATTERN,
generate_cacerts,
generate_htpasswd,
generate_keypair,
get_docker_compose_user_defined,
get_embedded_file,
get_user_defined_file,
start_service,
)
# Caching is needed, as singular-fixtures and list-fixtures will conflict at scale_factor=1
# This appears to only matter when attempting to start the docker secure GIT service
# for the second time.
# Maps each helper function's __name__ to its list of per-instance results, so the
# singular and list flavors of a fixture share the same underlying objects.
CACHE = {}
LOGGER = logging.getLogger(__name__)
class DockerGITCerts(NamedTuple):
    # pylint: disable=missing-class-docstring
    # Filesystem locations of the TLS material generated for the secure docker GIT service.
    ca_certificate: Path  # CA certificate used to sign the service certificate
    ca_private_key: Path  # private key of the CA
    certificate: Path  # service (leaf) certificate
    private_key: Path  # private key matching `certificate`
class DockerGITInsecure(NamedTuple):
    # pylint: disable=missing-class-docstring
    # Handle describing one running insecure (plain HTTP) docker GIT service instance.
    created_repos: List[str]  # repositories created inside the service
    docker_compose: Path  # instantiated docker-compose file used to start it
    endpoint: str  # network endpoint returned by start_service
    mirrored_repos: List[str]  # repositories mirrored into the service
    service_name: str  # docker-compose service name
# Note: NamedTuple does not support inheritance :(
# (hence the fields shared with DockerGITInsecure are duplicated here)
class DockerGITSecure(NamedTuple):
    # pylint: disable=missing-class-docstring
    # Handle describing one running secure (TLS + basic-auth) docker GIT service instance.
    auth_header: Dict[str, str]  # HTTP basic-auth header for the service
    cacerts: Path  # CA trust store containing the service certificate
    certs: DockerGITCerts  # TLS material used by the service
    created_repos: List[str]  # repositories created inside the service
    docker_compose: Path  # instantiated docker-compose file used to start it
    endpoint: str  # network endpoint returned by start_service
    htpasswd: Path  # htpasswd file provisioned into the service
    mirrored_repos: List[str]  # repositories mirrored into the service
    password: str  # basic-auth password
    service_name: str  # docker-compose service name
    ssl_context: SSLContext  # context trusting `cacerts`
    username: str  # basic-auth username
def _docker_compose_insecure(
    *,
    docker_compose_files: List[str],
    scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """
    Provides the location of the docker-compose configuration file containing the insecure docker GIT service.
    """
    cache_key = _docker_compose_insecure.__name__
    cached = CACHE.get(cache_key, [])
    # Only provision the instances that are not already cached.
    for index in range(len(cached), scale_factor):
        service_name = DOCKER_GIT_SERVICE_PATTERN.format("insecure", index)
        candidates = itertools.chain(
            get_docker_compose_user_defined(docker_compose_files, service_name),
            # TODO: lovely-docker-compose uses the file for teardown ...
            get_embedded_file(
                tmp_path_factory, delete_after=False, name="docker-compose.yml"
            ),
        )
        # Take the first available candidate; fall back to a placeholder.
        located = next(candidates, None)
        if located is None:
            LOGGER.warning("Unable to find docker compose for: %s", service_name)
            located = "-unknown-"
        cached.append(located)
    CACHE[cache_key] = cached
    yield cached
@pytest.fixture(scope="session")
def docker_compose_insecure(
    docker_compose_files: List[str], tmp_path_factory: TempPathFactory
) -> Generator[Path, None, None]:
    """
    Provides the location of the docker-compose configuration file containing the insecure docker GIT service.
    """
    # Singular flavor: delegate with scale_factor=1 and expose only the first entry.
    yield from (
        batch[0]
        for batch in _docker_compose_insecure(
            docker_compose_files=docker_compose_files,
            scale_factor=1,
            tmp_path_factory=tmp_path_factory,
        )
    )
@pytest.fixture(scope="session")
def docker_compose_insecure_list(
    docker_compose_files: List[str],
    pdrf_scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """
    Provides the location of the docker-compose configuration file containing the insecure docker GIT service.
    """
    # List flavor: honor the configured scale factor and yield whole batches.
    for batch in _docker_compose_insecure(
        docker_compose_files=docker_compose_files,
        scale_factor=pdrf_scale_factor,
        tmp_path_factory=tmp_path_factory,
    ):
        yield batch
def _docker_compose_secure(
    *,
    docker_compose_files: List[str],
    scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """
    Provides the location of the templated docker-compose configuration file containing the secure docker GIT
    service.
    """
    cache_key = _docker_compose_secure.__name__
    cached = CACHE.get(cache_key, [])
    # Only provision the instances that are not already cached.
    for index in range(len(cached), scale_factor):
        service_name = DOCKER_GIT_SERVICE_PATTERN.format("secure", index)
        candidates = itertools.chain(
            get_docker_compose_user_defined(docker_compose_files, service_name),
            get_embedded_file(
                tmp_path_factory, delete_after=False, name="docker-compose.yml"
            ),
        )
        # Take the first available candidate; fall back to a placeholder.
        located = next(candidates, None)
        if located is None:
            LOGGER.warning("Unable to find docker compose for: %s", service_name)
            located = "-unknown-"
        cached.append(located)
    CACHE[cache_key] = cached
    yield cached
@pytest.fixture(scope="session")
def docker_compose_secure(
    docker_compose_files: List[str], tmp_path_factory: TempPathFactory
) -> Generator[Path, None, None]:
    """
    Provides the location of the templated docker-compose configuration file containing the secure docker GIT
    service.
    """
    # Singular flavor: delegate with scale_factor=1 and expose only the first entry.
    yield from (
        batch[0]
        for batch in _docker_compose_secure(
            docker_compose_files=docker_compose_files,
            scale_factor=1,
            tmp_path_factory=tmp_path_factory,
        )
    )
@pytest.fixture(scope="session")
def docker_compose_secure_list(
    docker_compose_files: List[str],
    pdrf_scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """
    Provides the location of the templated docker-compose configuration file containing the secure docker GIT
    service.
    """
    # List flavor: honor the configured scale factor and yield whole batches.
    for batch in _docker_compose_secure(
        docker_compose_files=docker_compose_files,
        scale_factor=pdrf_scale_factor,
        tmp_path_factory=tmp_path_factory,
    ):
        yield batch
def _docker_git_auth_header(
    *,
    docker_git_password_list: List[str],
    docker_git_username_list: List[str],
    scale_factor: int,
) -> List[Dict[str, str]]:
    """Provides an HTTP basic authentication header containing credentials for the secure docker GIT service."""
    cache_key = _docker_git_auth_header.__name__
    headers = CACHE.get(cache_key, [])
    # Only build the headers that are not already cached.
    for index in range(len(headers), scale_factor):
        credentials = (
            f"{docker_git_username_list[index]}:{docker_git_password_list[index]}"
        )
        token = b64encode(credentials.encode("utf-8")).decode("utf-8")
        headers.append({"Authorization": f"Basic {token}"})
    CACHE[cache_key] = headers
    return headers
@pytest.fixture(scope="session")
def docker_git_auth_header(
    docker_git_password: str, docker_git_username: str
) -> Dict[str, str]:
    """Provides an HTTP basic authentication header containing credentials for the secure docker GIT service."""
    # Singular flavor: wrap the credentials in single-element lists and unwrap the result.
    headers = _docker_git_auth_header(
        docker_git_password_list=[docker_git_password],
        docker_git_username_list=[docker_git_username],
        scale_factor=1,
    )
    return headers[0]
@pytest.fixture(scope="session")
def docker_git_auth_header_list(
    docker_git_password_list: List[str],
    docker_git_username_list: List[str],
    pdrf_scale_factor: int,
) -> List[Dict[str, str]]:
    """Provides an HTTP basic authentication header containing credentials for the secure docker GIT service."""
    # List flavor: honor the configured scale factor.
    headers = _docker_git_auth_header(
        docker_git_password_list=docker_git_password_list,
        docker_git_username_list=docker_git_username_list,
        scale_factor=pdrf_scale_factor,
    )
    return headers
def _docker_git_cacerts(
    *,
    docker_git_certs_list: List[DockerGITCerts],
    pytestconfig: "_pytest.config.Config",
    scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """
    Provides the location of a temporary CA certificate trust store that contains the certificate of the secure docker
    GIT service.
    """
    cache_key = _docker_git_cacerts.__name__
    cached = CACHE.get(cache_key, [])
    # Only provision the trust stores that are not already cached.
    for index in range(len(cached), scale_factor):
        candidates = itertools.chain(
            get_user_defined_file(pytestconfig, "cacerts"),
            generate_cacerts(
                tmp_path_factory,
                certificate=docker_git_certs_list[index].ca_certificate,
            ),
        )
        # Prefer a user-supplied trust store; otherwise use the generated one.
        located = next(candidates, None)
        if located is None:
            LOGGER.warning("Unable to find or generate cacerts!")
            located = "-unknown-"
        cached.append(located)
    CACHE[cache_key] = cached
    yield cached
@pytest.fixture(scope="session")
def docker_git_cacerts(
    docker_git_certs: DockerGITCerts,
    pytestconfig: "_pytest.config.Config",
    tmp_path_factory: TempPathFactory,
) -> Generator[Path, None, None]:
    """
    Provides the location of a temporary CA certificate trust store that contains the certificate of the secure docker
    GIT service.
    """
    # Singular flavor: delegate with a single-element certs list and unwrap the result.
    yield from (
        batch[0]
        for batch in _docker_git_cacerts(
            docker_git_certs_list=[docker_git_certs],
            pytestconfig=pytestconfig,
            scale_factor=1,
            tmp_path_factory=tmp_path_factory,
        )
    )
@pytest.fixture(scope="session")
def docker_git_cacerts_list(
    docker_git_certs_list: List[DockerGITCerts],
    pdrf_scale_factor: int,
    pytestconfig: "_pytest.config.Config",
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """
    Provides the location of a temporary CA certificate trust store that contains the certificate of the secure docker
    GIT service.
    """
    # List flavor: honor the configured scale factor and yield whole batches.
    for batch in _docker_git_cacerts(
        docker_git_certs_list=docker_git_certs_list,
        pytestconfig=pytestconfig,
        scale_factor=pdrf_scale_factor,
        tmp_path_factory=tmp_path_factory,
    ):
        yield batch
def _docker_git_certs(
    *, scale_factor: int, tmp_path_factory: TempPathFactory
) -> Generator[List[DockerGITCerts], None, None]:
    """Provides the location of temporary certificate and private key files for the secure docker GIT service."""
    # TODO: Augment to allow for reading certificates from /test ...
    cache_key = _docker_git_certs.__name__
    cached = CACHE.get(cache_key, [])
    # Only generate the keypairs that are not already cached.
    for index in range(len(cached), scale_factor):
        tmp_path = tmp_path_factory.mktemp(__name__)
        keypair = generate_keypair()
        certs = DockerGITCerts(
            ca_certificate=tmp_path.joinpath(f"{DOCKER_GIT_SERVICE}-ca-{index}.crt"),
            ca_private_key=tmp_path.joinpath(f"{DOCKER_GIT_SERVICE}-ca-{index}.key"),
            certificate=tmp_path.joinpath(f"{DOCKER_GIT_SERVICE}-{index}.crt"),
            private_key=tmp_path.joinpath(f"{DOCKER_GIT_SERVICE}-{index}.key"),
        )
        # Materialize the generated keypair onto disk.
        certs.ca_certificate.write_bytes(keypair.ca_certificate)
        certs.ca_private_key.write_bytes(keypair.ca_private_key)
        certs.certificate.write_bytes(keypair.certificate)
        certs.private_key.write_bytes(keypair.private_key)
        cached.append(certs)
    CACHE[cache_key] = cached
    yield cached
    # Teardown: remove the temporary key material.
    for certs in cached:
        certs.ca_certificate.unlink(missing_ok=True)
        certs.ca_private_key.unlink(missing_ok=True)
        certs.certificate.unlink(missing_ok=True)
        certs.private_key.unlink(missing_ok=True)
@pytest.fixture(scope="session")
def docker_git_certs(
    tmp_path_factory: TempPathFactory,
) -> Generator[DockerGITCerts, None, None]:
    """Provides the location of temporary certificate and private key files for the secure docker GIT service."""
    # Singular flavor: delegate with scale_factor=1 and expose only the first entry.
    yield from (
        batch[0]
        for batch in _docker_git_certs(
            scale_factor=1, tmp_path_factory=tmp_path_factory
        )
    )
@pytest.fixture(scope="session")
def docker_git_certs_list(
    pdrf_scale_factor: int, tmp_path_factory: TempPathFactory
) -> Generator[List[DockerGITCerts], None, None]:
    """Provides the location of temporary certificate and private key files for the secure docker GIT service."""
    # List flavor: honor the configured scale factor and yield whole batches.
    for batch in _docker_git_certs(
        scale_factor=pdrf_scale_factor, tmp_path_factory=tmp_path_factory
    ):
        yield batch
def _docker_git_htpasswd(
    *,
    docker_git_password_list: List[str],
    docker_git_username_list: List[str],
    pytestconfig: "_pytest.config.Config",
    scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """Provides the location of the htpasswd file for the secure GIT service."""
    cache_key = _docker_git_htpasswd.__name__
    cached = CACHE.get(cache_key, [])
    # Only provision the htpasswd files that are not already cached.
    for index in range(len(cached), scale_factor):
        candidates = itertools.chain(
            get_user_defined_file(pytestconfig, "htpasswd"),
            generate_htpasswd(
                tmp_path_factory,
                username=docker_git_username_list[index],
                password=docker_git_password_list[index],
            ),
        )
        # Prefer a user-supplied htpasswd; otherwise use the generated one.
        located = next(candidates, None)
        if located is None:
            LOGGER.warning("Unable to find or generate htpasswd!")
            located = "-unknown-"
        cached.append(located)
    CACHE[cache_key] = cached
    yield cached
@pytest.fixture(scope="session")
def docker_git_htpasswd(
    docker_git_password: str,
    docker_git_username: str,
    pytestconfig: "_pytest.config.Config",
    tmp_path_factory: TempPathFactory,
) -> Generator[Path, None, None]:
    """Provides the location of the htpasswd file for the secure GIT service."""
    # Singular flavor: wrap the credentials in single-element lists and unwrap the result.
    yield from (
        batch[0]
        for batch in _docker_git_htpasswd(
            docker_git_password_list=[docker_git_password],
            docker_git_username_list=[docker_git_username],
            pytestconfig=pytestconfig,
            scale_factor=1,
            tmp_path_factory=tmp_path_factory,
        )
    )
@pytest.fixture(scope="session")
def docker_git_htpasswd_list(
    docker_git_password_list: List[str],
    docker_git_username_list: List[str],
    pdrf_scale_factor: int,
    pytestconfig: "_pytest.config.Config",
    tmp_path_factory: TempPathFactory,
) -> Generator[List[Path], None, None]:
    """Provides the location of the htpasswd file for the secure GIT service."""
    # List flavor: honor the configured scale factor and yield whole batches.
    for batch in _docker_git_htpasswd(
        docker_git_password_list=docker_git_password_list,
        docker_git_username_list=docker_git_username_list,
        pytestconfig=pytestconfig,
        scale_factor=pdrf_scale_factor,
        tmp_path_factory=tmp_path_factory,
    ):
        yield batch
def _docker_git_insecure(
    *,
    docker_compose_insecure_list: List[Path],
    docker_services: Services,
    request,
    scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[DockerGITInsecure], None, None]:
    """Provides the endpoint of a local, mutable, insecure, docker GIT SCM."""
    # Results are memoized in CACHE so the singular and list fixtures share instances.
    cache_key = _docker_git_insecure.__name__
    result = CACHE.get(cache_key, [])
    for i in range(scale_factor):
        if i < len(result):
            # Already provisioned by an earlier fixture instantiation.
            continue
        service_name = DOCKER_GIT_SERVICE_PATTERN.format("insecure", i)
        tmp_path = tmp_path_factory.mktemp(__name__)
        create_repos = []
        mirror_repos = []
        if i == 0:
            # Only the first instance gets the repositories requested via markers.
            LOGGER.debug("Initializing repositories in %s [%d] ...", service_name, i)
            create_repos = _get_create_repo(request)
            LOGGER.debug("  creating :")
            for repo in create_repos:
                LOGGER.debug("    %s", repo)
            mirror_repos = _get_mirror_repo(request)
            LOGGER.debug("  mirroring :")
            for repo in mirror_repos:
                LOGGER.debug("    %s", repo)
        # Create a secure GIT service from the docker compose template ...
        # NOTE(review): comment above says "secure" but this is the insecure flavor;
        # the /dev/null cert/key/htpasswd placeholders below confirm no TLS/auth is wired in.
        path_docker_compose = tmp_path.joinpath(f"docker-compose-{i}.yml")
        template = Template(docker_compose_insecure_list[i].read_text("utf-8"))
        path_docker_compose.write_text(
            template.substitute(
                {
                    "CONTAINER_NAME": service_name,
                    # Note: Needed to correctly populate the embedded, consolidated, service template ...
                    "PATH_CERTIFICATE": "/dev/null",
                    "PATH_HTPASSWD": "/dev/null",
                    "PATH_KEY": "/dev/null",
                    "PDGF_CREATE_REPOS": ",".join(create_repos),
                    "PDGF_MIRROR_REPOS": ",".join(mirror_repos),
                }
            ),
            "utf-8",
        )
        LOGGER.debug("Starting insecure docker GIT service [%d] ...", i)
        LOGGER.debug("  docker-compose : %s", path_docker_compose)
        LOGGER.debug("  service name : %s", service_name)
        # Bring the service up (port 80 = plain HTTP) and wait for its endpoint.
        endpoint = start_service(
            docker_services,
            docker_compose=path_docker_compose,
            port=80,
            service_name=service_name,
        )
        LOGGER.debug("Insecure docker GIT endpoint [%d]: %s", i, endpoint)
        result.append(
            DockerGITInsecure(
                created_repos=create_repos,
                docker_compose=path_docker_compose,
                endpoint=endpoint,
                mirrored_repos=mirror_repos,
                service_name=service_name,
            )
        )
    CACHE[cache_key] = result
    yield result
@pytest.fixture(scope="session")
def docker_git_insecure(
    docker_compose_insecure: Path,
    docker_services: Services,
    request,
    tmp_path_factory: TempPathFactory,
) -> Generator[DockerGITInsecure, None, None]:
    """Provides the endpoint of a local, mutable, insecure, docker GIT SCM."""
    # Singular flavor: delegate with a single-element compose list and unwrap the result.
    yield from (
        batch[0]
        for batch in _docker_git_insecure(
            docker_compose_insecure_list=[docker_compose_insecure],
            docker_services=docker_services,
            request=request,
            scale_factor=1,
            tmp_path_factory=tmp_path_factory,
        )
    )
@pytest.fixture(scope="session")
def docker_git_insecure_list(
    docker_compose_insecure_list: List[Path],
    docker_services: Services,
    pdrf_scale_factor: int,
    request,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[DockerGITInsecure], None, None]:
    """Provides the endpoint of a local, mutable, insecure, docker GIT SCM."""
    # Forward the per-instance fixture lists to the scalable helper, which
    # yields the full list of enumerated services.
    helper_kwargs = {
        "docker_compose_insecure_list": docker_compose_insecure_list,
        "docker_services": docker_services,
        "request": request,
        "scale_factor": pdrf_scale_factor,
        "tmp_path_factory": tmp_path_factory,
    }
    yield from _docker_git_insecure(**helper_kwargs)
def _docker_git_password(*, scale_factor: int) -> List[str]:
    """Provides the password to use for authentication to the secure GIT service."""
    cache_key = _docker_git_password.__name__
    passwords = CACHE.get(cache_key, [])
    # Extend the cached list only for indices not yet populated; earlier
    # invocations with a smaller scale factor are thereby reused.
    while len(passwords) < scale_factor:
        # A timestamp keeps each password unique; the short sleep guarantees
        # successive time() readings differ.
        passwords.append(f"pytest.password.{time()}")
        sleep(0.05)
    CACHE[cache_key] = passwords
    return passwords
@pytest.fixture(scope="session")
def docker_git_password() -> str:
    """Provides the password to use for authentication to the secure GIT service."""
    # Single-instance case: take the first entry from the scalable helper.
    passwords = _docker_git_password(scale_factor=1)
    return passwords[0]
@pytest.fixture(scope="session")
def docker_git_password_list(pdrf_scale_factor: int) -> List[str]:
    """Provides the password to use for authentication to the secure GIT service."""
    # One password per enumerated service instance.
    passwords = _docker_git_password(scale_factor=pdrf_scale_factor)
    return passwords
def _docker_git_secure(
    *,
    docker_compose_secure_list: List[Path],
    docker_git_auth_header_list: List[Dict[str, str]],
    docker_git_cacerts_list: List[Path],
    docker_git_certs_list: List[DockerGITCerts],
    docker_git_htpasswd_list: List[Path],
    docker_git_password_list: List[str],
    docker_git_ssl_context_list: List[SSLContext],
    docker_git_username_list: List[str],
    docker_services: Services,
    request,
    scale_factor: int,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[DockerGITSecure], None, None]:
    """
    Provides the endpoint of a local, mutable, secure, docker GIT SCM.

    Instantiates up to ``scale_factor`` services; services already started by
    an earlier invocation are reused via the module-level CACHE.
    """
    cache_key = _docker_git_secure.__name__
    result = CACHE.get(cache_key, [])
    for i in range(scale_factor):
        # Skip indices that were instantiated by an earlier invocation.
        if i < len(result):
            continue
        service_name = DOCKER_GIT_SERVICE_PATTERN.format("secure", i)
        tmp_path = tmp_path_factory.mktemp(__name__)
        create_repos = []
        mirror_repos = []
        # Only the first service initializes repositories (mirrors the
        # behavior of the insecure variant).
        if i == 0:
            # Consistency fix: the insecure variant logs this initialization
            # message; emit it here as well for parity.
            LOGGER.debug("Initializing repositories in %s [%d] ...", service_name, i)
            create_repos = _get_create_repo(request)
            LOGGER.debug("  creating  :")
            for repo in create_repos:
                LOGGER.debug("    %s", repo)
            mirror_repos = _get_mirror_repo(request)
            LOGGER.debug("  mirroring :")
            for repo in mirror_repos:
                LOGGER.debug("    %s", repo)
        # Create a secure GIT service from the docker compose template ...
        path_docker_compose = tmp_path.joinpath(f"docker-compose-{i}.yml")
        template = Template(docker_compose_secure_list[i].read_text("utf-8"))
        path_docker_compose.write_text(
            template.substitute(
                {
                    "CONTAINER_NAME": service_name,
                    "PATH_CERTIFICATE": docker_git_certs_list[i].certificate,
                    "PATH_HTPASSWD": docker_git_htpasswd_list[i],
                    "PATH_KEY": docker_git_certs_list[i].private_key,
                    "PDGF_CREATE_REPOS": ",".join(create_repos),
                    "PDGF_MIRROR_REPOS": ",".join(mirror_repos),
                }
            ),
            "utf-8",
        )
        LOGGER.debug("Starting secure docker GIT service [%d] ...", i)
        LOGGER.debug("  docker-compose : %s", path_docker_compose)
        LOGGER.debug("  ca certificate : %s", docker_git_certs_list[i].ca_certificate)
        LOGGER.debug("  certificate    : %s", docker_git_certs_list[i].certificate)
        LOGGER.debug("  htpasswd       : %s", docker_git_htpasswd_list[i])
        LOGGER.debug("  private key    : %s", docker_git_certs_list[i].private_key)
        LOGGER.debug("  password       : %s", docker_git_password_list[i])
        LOGGER.debug("  service name   : %s", service_name)
        LOGGER.debug("  username       : %s", docker_git_username_list[i])
        # The readiness probe must authenticate and trust the generated CA.
        check_server = partial(
            check_url_secure,
            auth_header=docker_git_auth_header_list[i],
            ssl_context=docker_git_ssl_context_list[i],
        )
        endpoint = start_service(
            docker_services,
            check_server=check_server,
            docker_compose=path_docker_compose,
            port=443,
            service_name=service_name,
        )
        LOGGER.debug("Secure docker GIT endpoint [%d]: %s", i, endpoint)
        result.append(
            DockerGITSecure(
                auth_header=docker_git_auth_header_list[i],
                cacerts=docker_git_cacerts_list[i],
                certs=docker_git_certs_list[i],
                created_repos=create_repos,
                docker_compose=path_docker_compose,
                endpoint=endpoint,
                htpasswd=docker_git_htpasswd_list[i],
                mirrored_repos=mirror_repos,
                password=docker_git_password_list[i],
                service_name=service_name,
                ssl_context=docker_git_ssl_context_list[i],
                username=docker_git_username_list[i],
            )
        )
    CACHE[cache_key] = result
    yield result
@pytest.fixture(scope="session")
def docker_git_secure(
    docker_compose_secure: Path,
    docker_git_auth_header: Dict[str, str],
    docker_git_cacerts: Path,
    docker_git_certs: DockerGITCerts,
    docker_git_htpasswd: Path,
    docker_git_password: str,
    docker_git_ssl_context: SSLContext,
    docker_git_username: str,
    docker_services: Services,
    request,
    tmp_path_factory: TempPathFactory,
) -> Generator[DockerGITSecure, None, None]:
    """Provides the endpoint of a local, mutable, secure, docker GIT SCM."""
    # Wrap each scalar fixture in a single-element list, delegate to the
    # scalable helper, and unwrap the sole service from each yielded batch.
    generator = _docker_git_secure(
        docker_compose_secure_list=[docker_compose_secure],
        docker_git_auth_header_list=[docker_git_auth_header],
        docker_git_cacerts_list=[docker_git_cacerts],
        docker_git_certs_list=[docker_git_certs],
        docker_git_htpasswd_list=[docker_git_htpasswd],
        docker_git_password_list=[docker_git_password],
        docker_git_ssl_context_list=[docker_git_ssl_context],
        docker_git_username_list=[docker_git_username],
        docker_services=docker_services,
        request=request,
        scale_factor=1,
        tmp_path_factory=tmp_path_factory,
    )
    for services in generator:
        yield services[0]
@pytest.fixture(scope="session")
def docker_git_secure_list(
    docker_compose_secure_list: List[Path],
    docker_git_auth_header_list: List[Dict[str, str]],
    docker_git_cacerts_list: List[Path],
    docker_git_certs_list: List[DockerGITCerts],
    docker_git_htpasswd_list: List[Path],
    docker_git_password_list: List[str],
    docker_git_ssl_context_list: List[SSLContext],
    docker_git_username_list: List[str],
    docker_services: Services,
    pdrf_scale_factor: int,
    request,
    tmp_path_factory: TempPathFactory,
) -> Generator[List[DockerGITSecure], None, None]:
    """Provides the endpoint of a local, mutable, secure, docker GIT SCM."""
    # Forward every per-instance fixture list to the scalable helper.
    helper_kwargs = {
        "docker_compose_secure_list": docker_compose_secure_list,
        "docker_git_auth_header_list": docker_git_auth_header_list,
        "docker_git_cacerts_list": docker_git_cacerts_list,
        "docker_git_certs_list": docker_git_certs_list,
        "docker_git_htpasswd_list": docker_git_htpasswd_list,
        "docker_git_password_list": docker_git_password_list,
        "docker_git_ssl_context_list": docker_git_ssl_context_list,
        "docker_git_username_list": docker_git_username_list,
        "docker_services": docker_services,
        "request": request,
        "scale_factor": pdrf_scale_factor,
        "tmp_path_factory": tmp_path_factory,
    }
    yield from _docker_git_secure(**helper_kwargs)
def _docker_git_ssl_context(
    *, docker_git_cacerts_list: List[Path], scale_factor: int
) -> List[SSLContext]:
    """
    Provides an SSLContext referencing the temporary CA certificate trust store that contains the certificate of the
    secure docker GIT service.
    """
    cache_key = _docker_git_ssl_context.__name__
    contexts = CACHE.get(cache_key, [])
    # Populate only the missing tail of the cached list; each context trusts
    # the CA bundle generated for the corresponding service index.
    while len(contexts) < scale_factor:
        cafile = str(docker_git_cacerts_list[len(contexts)])
        contexts.append(create_default_context(cafile=cafile))
    CACHE[cache_key] = contexts
    return contexts
@pytest.fixture(scope="session")
def docker_git_ssl_context(docker_git_cacerts: Path) -> SSLContext:
    """
    Provides an SSLContext referencing the temporary CA certificate trust store that contains the certificate of the
    secure docker GIT service.
    """
    # Single-instance case: wrap the scalar fixture and take the first entry.
    contexts = _docker_git_ssl_context(
        docker_git_cacerts_list=[docker_git_cacerts], scale_factor=1
    )
    return contexts[0]
@pytest.fixture(scope="session")
def docker_git_ssl_context_list(
    docker_git_cacerts_list: List[Path],
    pdrf_scale_factor: int,
) -> List[SSLContext]:
    """
    Provides an SSLContext referencing the temporary CA certificate trust store that contains the certificate of the
    secure docker GIT service.
    """
    # One SSL context per enumerated service instance.
    contexts = _docker_git_ssl_context(
        docker_git_cacerts_list=docker_git_cacerts_list,
        scale_factor=pdrf_scale_factor,
    )
    return contexts
def _docker_git_username(*, scale_factor: int) -> List[str]:
    """Retrieve the name of the user to use for authentication to the secure GIT service."""
    cache_key = _docker_git_username.__name__
    usernames = CACHE.get(cache_key, [])
    # Extend the cached list only for indices not yet populated.
    while len(usernames) < scale_factor:
        # A timestamp keeps each username unique; the short sleep guarantees
        # successive time() readings differ.
        usernames.append(f"pytest.username.{time()}")
        sleep(0.05)
    CACHE[cache_key] = usernames
    return usernames
@pytest.fixture(scope="session")
def docker_git_username() -> str:
    """Retrieve the name of the user to use for authentication to the secure GIT service."""
    # Single-instance case: take the first entry from the scalable helper.
    usernames = _docker_git_username(scale_factor=1)
    return usernames[0]
@pytest.fixture(scope="session")
def docker_git_username_list(
    pdrf_scale_factor: int,
) -> List[str]:
    """Retrieve the name of the user to use for authentication to the secure GIT service."""
    # One username per enumerated service instance.
    return _docker_git_username(scale_factor=pdrf_scale_factor)
@pytest.fixture(scope="session")
def pdrf_scale_factor() -> int:
    """Provides the number of enumerated instances to be instantiated."""
    # Default scale: a single instance of each enumerated service / fixture.
    default_scale = 1
    return default_scale
def _get_create_repo(request) -> List[str]:
"""
Retrieves the list of all GIT repositories to be created.
Args:
request: The pytest requests object from which to retrieve the marks.
Returns: The list of GIT repositories to be created.
"""
names = request.config.getoption("--create-repo", [])
# names.extend(request.node.get_closest_marker("create_repo", []))
# * Split ',' separated lists
# * Remove duplicates - see conftest.py::pytest_collection_modifyitems()
names = [name for i in names for name in i.split(",")]
return list(set(names))
def _get_mirror_repo(request) -> List[str]:
"""
Retrieves the list of all GIT repositories to be mirrored.
Args:
request: The pytest requests object from which to retrieve the marks.
Returns: The list of GIT repositories to be mirrored.
"""
uris = request.config.getoption("--mirror-repo", [])
# uris.extend(request.node.get_closest_marker("mirror_repo", []))
# * Split ',' separated lists
# * Remove duplicates - see conftest.py::pytest_collection_modifyitems()
uris = [uri for i in uris for uri in i.split(",")]
return list(set(uris))
| 34.601863
| 118
| 0.678868
| 3,650
| 29,723
| 5.199726
| 0.071233
| 0.103377
| 0.041309
| 0.030297
| 0.865799
| 0.831972
| 0.78629
| 0.754044
| 0.710786
| 0.653986
| 0
| 0.002142
| 0.230259
| 29,723
| 858
| 119
| 34.642191
| 0.827396
| 0.161256
| 0
| 0.649612
| 0
| 0
| 0.064669
| 0.016106
| 0
| 0
| 0
| 0.002331
| 0
| 1
| 0.055814
| false
| 0.072868
| 0.020155
| 0
| 0.136434
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c75360268e6fd3afd6cbfb4ecf09a4de5cbf138a
| 7,320
|
py
|
Python
|
tests/test_sampler.py
|
zhangwenwen/mmdetection
|
ccb4e58a4f3aa4fbda8f880c8c53fac561549abc
|
[
"Apache-2.0"
] | 1,467
|
2020-03-24T01:38:24.000Z
|
2022-03-31T03:02:05.000Z
|
tests/test_sampler.py
|
ZhiyuanChen/mmdetection
|
bdfa60d6bf3f09111f9620fcb279ef07a6ba6a09
|
[
"Apache-2.0"
] | 208
|
2020-03-26T16:24:23.000Z
|
2022-03-30T13:12:07.000Z
|
tests/test_sampler.py
|
ZhiyuanChen/mmdetection
|
bdfa60d6bf3f09111f9620fcb279ef07a6ba6a09
|
[
"Apache-2.0"
] | 300
|
2020-03-24T03:55:02.000Z
|
2022-03-29T19:08:07.000Z
|
import torch
from mmdet.core import MaxIoUAssigner
from mmdet.core.bbox.samplers import OHEMSampler, RandomSampler
def test_random_sampler():
    """RandomSampler must keep sampled boxes and indices in correspondence."""
    iou_assigner = MaxIoUAssigner(
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        ignore_iof_thr=0.5,
        ignore_wrt_candidates=False,
    )
    priors = torch.FloatTensor(
        [[0, 0, 10, 10], [10, 10, 20, 20], [5, 5, 15, 15], [32, 32, 38, 42]])
    gt_boxes = torch.FloatTensor([[0, 0, 10, 9], [0, 10, 10, 19]])
    gt_labels = torch.LongTensor([1, 2])
    ignored_boxes = torch.Tensor([[30, 30, 40, 40]])
    assign_result = iou_assigner.assign(
        priors,
        gt_boxes,
        gt_bboxes_ignore=ignored_boxes,
        gt_labels=gt_labels)
    sampler = RandomSampler(
        num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True)
    result = sampler.sample(assign_result, priors, gt_boxes, gt_labels)
    # Positive/negative boxes and their indices must stay one-to-one.
    assert len(result.pos_bboxes) == len(result.pos_inds)
    assert len(result.neg_bboxes) == len(result.neg_inds)
def test_random_sampler_empty_gt():
    """RandomSampler must cope with an image that has no ground truth."""
    iou_assigner = MaxIoUAssigner(
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        ignore_iof_thr=0.5,
        ignore_wrt_candidates=False,
    )
    priors = torch.FloatTensor(
        [[0, 0, 10, 10], [10, 10, 20, 20], [5, 5, 15, 15], [32, 32, 38, 42]])
    # Zero ground-truth boxes / labels (empty edge case).
    gt_boxes = torch.empty(0, 4)
    gt_labels = torch.empty(0, ).long()
    assign_result = iou_assigner.assign(priors, gt_boxes, gt_labels=gt_labels)
    sampler = RandomSampler(
        num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True)
    result = sampler.sample(assign_result, priors, gt_boxes, gt_labels)
    assert len(result.pos_bboxes) == len(result.pos_inds)
    assert len(result.neg_bboxes) == len(result.neg_inds)
def test_random_sampler_empty_pred():
    """RandomSampler must cope with zero predicted boxes."""
    iou_assigner = MaxIoUAssigner(
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        ignore_iof_thr=0.5,
        ignore_wrt_candidates=False,
    )
    # Zero predictions (empty edge case).
    priors = torch.empty(0, 4)
    gt_boxes = torch.FloatTensor([[0, 0, 10, 9], [0, 10, 10, 19]])
    gt_labels = torch.LongTensor([1, 2])
    assign_result = iou_assigner.assign(priors, gt_boxes, gt_labels=gt_labels)
    sampler = RandomSampler(
        num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True)
    result = sampler.sample(assign_result, priors, gt_boxes, gt_labels)
    assert len(result.pos_bboxes) == len(result.pos_inds)
    assert len(result.neg_bboxes) == len(result.neg_inds)
def _context_for_ohem():
    """Build a detector instance to serve as the OHEM sampler's context.

    Returns a detector constructed from the faster_rcnn_ohem config, with the
    pretrained backbone disabled and RoI align forced onto the torchvision
    (CPU-capable) implementation so the tests can run without a GPU.
    """
    try:
        from test_forward import _get_detector_cfg
    except ImportError:
        # Hack: grab testing utils from test_forward to make a context for ohem
        import sys
        from os.path import dirname
        # Make the tests directory importable, then retry the import.
        sys.path.insert(0, dirname(__file__))
        from test_forward import _get_detector_cfg
    model, train_cfg, test_cfg = _get_detector_cfg(
        'faster_rcnn_ohem_r50_fpn_1x.py')
    # Skip downloading/loading pretrained backbone weights.
    model['pretrained'] = None
    # torchvision roi align supports CPU
    model['bbox_roi_extractor']['roi_layer']['use_torchvision'] = True
    from mmdet.models import build_detector
    context = build_detector(model, train_cfg=train_cfg, test_cfg=test_cfg)
    return context
def test_ohem_sampler():
    """OHEMSampler must keep sampled boxes and indices in correspondence."""
    iou_assigner = MaxIoUAssigner(
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        ignore_iof_thr=0.5,
        ignore_wrt_candidates=False,
    )
    priors = torch.FloatTensor(
        [[0, 0, 10, 10], [10, 10, 20, 20], [5, 5, 15, 15], [32, 32, 38, 42]])
    gt_boxes = torch.FloatTensor([[0, 0, 10, 9], [0, 10, 10, 19]])
    gt_labels = torch.LongTensor([1, 2])
    ignored_boxes = torch.Tensor([[30, 30, 40, 40]])
    assign_result = iou_assigner.assign(
        priors,
        gt_boxes,
        gt_bboxes_ignore=ignored_boxes,
        gt_labels=gt_labels)
    context = _context_for_ohem()
    sampler = OHEMSampler(
        num=10,
        pos_fraction=0.5,
        context=context,
        neg_pos_ub=-1,
        add_gt_as_proposals=True)
    # One pyramid level per scale 2**[6, 5, 4, 3, 2].
    feats = [
        torch.rand(1, 256, int(2**lvl), int(2**lvl)) for lvl in [6, 5, 4, 3, 2]
    ]
    result = sampler.sample(
        assign_result, priors, gt_boxes, gt_labels, feats=feats)
    assert len(result.pos_bboxes) == len(result.pos_inds)
    assert len(result.neg_bboxes) == len(result.neg_inds)
def test_ohem_sampler_empty_gt():
    """OHEMSampler must cope with an image that has no ground truth."""
    iou_assigner = MaxIoUAssigner(
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        ignore_iof_thr=0.5,
        ignore_wrt_candidates=False,
    )
    priors = torch.FloatTensor(
        [[0, 0, 10, 10], [10, 10, 20, 20], [5, 5, 15, 15], [32, 32, 38, 42]])
    # Zero ground-truth boxes / labels (empty edge case).
    gt_boxes = torch.empty(0, 4)
    gt_labels = torch.LongTensor([])
    ignored_boxes = torch.Tensor([])
    assign_result = iou_assigner.assign(
        priors,
        gt_boxes,
        gt_bboxes_ignore=ignored_boxes,
        gt_labels=gt_labels)
    context = _context_for_ohem()
    sampler = OHEMSampler(
        num=10,
        pos_fraction=0.5,
        context=context,
        neg_pos_ub=-1,
        add_gt_as_proposals=True)
    # One pyramid level per scale 2**[6, 5, 4, 3, 2].
    feats = [
        torch.rand(1, 256, int(2**lvl), int(2**lvl)) for lvl in [6, 5, 4, 3, 2]
    ]
    result = sampler.sample(
        assign_result, priors, gt_boxes, gt_labels, feats=feats)
    assert len(result.pos_bboxes) == len(result.pos_inds)
    assert len(result.neg_bboxes) == len(result.neg_inds)
def test_ohem_sampler_empty_pred():
    """OHEMSampler must cope with zero predicted boxes."""
    iou_assigner = MaxIoUAssigner(
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        ignore_iof_thr=0.5,
        ignore_wrt_candidates=False,
    )
    # Zero predictions (empty edge case).
    priors = torch.empty(0, 4)
    gt_boxes = torch.FloatTensor(
        [[0, 0, 10, 10], [10, 10, 20, 20], [5, 5, 15, 15], [32, 32, 38, 42]])
    gt_labels = torch.LongTensor([1, 2, 2, 3])
    ignored_boxes = torch.Tensor([])
    assign_result = iou_assigner.assign(
        priors,
        gt_boxes,
        gt_bboxes_ignore=ignored_boxes,
        gt_labels=gt_labels)
    context = _context_for_ohem()
    sampler = OHEMSampler(
        num=10,
        pos_fraction=0.5,
        context=context,
        neg_pos_ub=-1,
        add_gt_as_proposals=True)
    # One pyramid level per scale 2**[6, 5, 4, 3, 2].
    feats = [
        torch.rand(1, 256, int(2**lvl), int(2**lvl)) for lvl in [6, 5, 4, 3, 2]
    ]
    result = sampler.sample(
        assign_result, priors, gt_boxes, gt_labels, feats=feats)
    assert len(result.pos_bboxes) == len(result.pos_inds)
    assert len(result.neg_bboxes) == len(result.neg_inds)
def test_random_sample_result():
    """Smoke-test SamplingResult.random over gt/pred count combinations."""
    from mmdet.core.bbox.samplers.sampling_result import SamplingResult
    # Same combinations as before, including the duplicated (0, 3) case.
    for num_gts, num_preds in [(0, 0), (0, 3), (3, 3), (0, 3), (7, 7),
                               (7, 64), (24, 3)]:
        SamplingResult.random(num_gts=num_gts, num_preds=num_preds)
    for seed in range(3):
        SamplingResult.random(rng=seed)
| 29.28
| 79
| 0.636612
| 1,040
| 7,320
| 4.193269
| 0.117308
| 0.085302
| 0.08255
| 0.022013
| 0.85164
| 0.840174
| 0.8349
| 0.79867
| 0.79867
| 0.79867
| 0
| 0.059888
| 0.244945
| 7,320
| 249
| 80
| 29.39759
| 0.729148
| 0.014208
| 0
| 0.803922
| 0
| 0
| 0.011368
| 0.004159
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.039216
| false
| 0
| 0.04902
| 0
| 0.093137
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7a1faeab53667bcdb4ff9423302f5ed974a02e7
| 146
|
py
|
Python
|
np/reference/ch8code/stringequal.py
|
focusunsink/study_python
|
322326642db54df8725793d70a95d21ac40b6507
|
[
"MIT"
] | null | null | null |
np/reference/ch8code/stringequal.py
|
focusunsink/study_python
|
322326642db54df8725793d70a95d21ac40b6507
|
[
"MIT"
] | null | null | null |
np/reference/ch8code/stringequal.py
|
focusunsink/study_python
|
322326642db54df8725793d70a95d21ac40b6507
|
[
"MIT"
] | null | null | null |
import numpy as np

# Demonstrate numpy.testing.assert_string_equal (Python 3 syntax; the
# original used Python 2 print statements, which no longer parse).
# assert_string_equal returns None when the strings match.
print("Pass", np.testing.assert_string_equal("NumPy", "NumPy"))

# "NumPy" != "Numpy": assert_string_equal raises AssertionError.  Catch it so
# the demonstration script runs to completion instead of crashing mid-way.
try:
    print("Fail", np.testing.assert_string_equal("NumPy", "Numpy"))
except AssertionError as exc:
    print("Fail", exc)
| 29.2
| 62
| 0.746575
| 22
| 146
| 4.772727
| 0.5
| 0.171429
| 0.285714
| 0.4
| 0.685714
| 0.685714
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0.089041
| 146
| 4
| 63
| 36.5
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0.191781
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0
| null | null | 0.333333
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 1
|
0
| 9
|
c7d08a6c54c9bc5f88230fcf3a3b847eda678bab
| 4,018
|
py
|
Python
|
tests/server/blueprints/alignviewers/test_alignviewers_controllers.py
|
gmc-norr/scout
|
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/server/blueprints/alignviewers/test_alignviewers_controllers.py
|
gmc-norr/scout
|
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/server/blueprints/alignviewers/test_alignviewers_controllers.py
|
gmc-norr/scout
|
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
|
[
"BSD-3-Clause"
] | null | null | null |
from scout.server.blueprints.alignviewers import controllers
# Genome build identifiers and the chromosome shared by all track tests below.
BUILD37 = "37"
BUILD38 = "38"
CHROM = "22"
def test_clinvar_track_build37():
    """Test function that returns clinVar track as a dictionary when build is 37"""
    # WHEN clinVar track controller is invoked with genome build 37
    track = controllers.clinvar_track(BUILD37, CHROM)
    # THEN it should return a dictionary with the right keys/values
    for key, expected in [("name", "ClinVar"), ("type", "annotation"),
                          ("sourceType", "file")]:
        assert track[key] == expected
    assert "hg19" in track["url"]
def test_clinvar_track_build38():
    """Test function that returns clinVar track as a dictionary when build is 38"""
    # WHEN clinVar track controller is invoked with genome build 38
    track = controllers.clinvar_track(BUILD38, CHROM)
    # THEN it should return a dictionary with the right keys/values
    for key, expected in [("name", "ClinVar"), ("type", "annotation"),
                          ("sourceType", "file")]:
        assert track[key] == expected
    assert "hg38" in track["url"]
def test_clinvar_cnvs_track_build_37():
    """Test function that returns clinVar CNVs track as a dictionary when build is 37"""
    # WHEN clinVar CNVs track controller is invoked with genome build 37
    track = controllers.clinvar_cnvs_track(BUILD37, CHROM)
    # THEN it should return a dictionary with the right keys/values
    for key, expected in [("name", "ClinVar CNVs"), ("type", "annotation"),
                          ("displayMode", "SQUISHED"), ("sourceType", "file")]:
        assert track[key] == expected
    assert "hg19" in track["url"]
def test_clinvar_cnvs_track_build_38():
    """Test function that returns clinVar CNVs track as a dictionary when build is 38"""
    # WHEN clinVar CNVs track controller is invoked with genome build 38
    track = controllers.clinvar_cnvs_track(BUILD38, CHROM)
    # THEN it should return a dictionary with the right keys/values
    for key, expected in [("name", "ClinVar CNVs"), ("type", "annotation"),
                          ("displayMode", "SQUISHED"), ("sourceType", "file")]:
        assert track[key] == expected
    assert "hg38" in track["url"]
def test_reference_track_build_37():
    """Test function that returns the reference track as a dictionary when build is 37"""
    # WHEN genome reference track controller is invoked with genome build 37
    track = controllers.reference_track(BUILD37, CHROM)
    # THEN every resource URL should point at the hg19 assets
    for url_key in ("fastaURL", "indexURL", "cytobandURL"):
        assert "hg19" in track[url_key]
def test_reference_track_build_38():
    """Test function that returns the reference track as a dictionary when build is 38"""
    # WHEN genome reference track controller is invoked with genome build 38
    track = controllers.reference_track(BUILD38, CHROM)
    # THEN every resource URL should point at the hg38 assets
    for url_key in ("fastaURL", "indexURL", "cytobandURL"):
        assert "hg38" in track[url_key]
def test_genes_track_build_37():
    """Test function that returns the genes track as a dictionary when build is 37"""
    # WHEN genes track controller is invoked with genome build 37
    track = controllers.genes_track(BUILD37, CHROM)
    # THEN it should return a dictionary with the right keys/values
    for key, expected in [("name", "Genes"), ("type", "annotation"),
                          ("sourceType", "file")]:
        assert track[key] == expected
    assert "hg19" in track["url"]
    assert "hg19" in track["indexURL"]
def test_genes_track_build_38():
    """Test function that returns the genes track as a dictionary when build is 38"""
    # WHEN genes track controller is invoked with genome build 38
    track = controllers.genes_track(BUILD38, CHROM)
    # THEN it should return a dictionary with the right keys/values
    for key, expected in [("name", "Genes"), ("type", "annotation"),
                          ("sourceType", "file")]:
        assert track[key] == expected
    assert "hg38" in track["url"]
    assert "hg38" in track["indexURL"]
| 35.557522
| 89
| 0.70657
| 545
| 4,018
| 5.13211
| 0.102752
| 0.078656
| 0.045763
| 0.065785
| 0.927422
| 0.85842
| 0.85842
| 0.85699
| 0.839828
| 0.818019
| 0
| 0.031569
| 0.195869
| 4,018
| 112
| 90
| 35.875
| 0.834107
| 0.40667
| 0
| 0.555556
| 0
| 0
| 0.183849
| 0
| 0
| 0
| 0
| 0
| 0.62963
| 1
| 0.148148
| false
| 0
| 0.018519
| 0
| 0.166667
| 0.018519
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40232cd81520c2497b8ccede38943091d3e8d5b7
| 23,287
|
py
|
Python
|
tests/test_identities_client.py
|
FPriv/factom-harmony-connect-python-sdk
|
b853596e7ca15205e1b431f0d8af6ba423365fc7
|
[
"MIT"
] | 7
|
2019-03-27T16:20:57.000Z
|
2019-04-15T18:20:41.000Z
|
tests/test_identities_client.py
|
FPriv/factom-harmony-connect-python-sdk
|
b853596e7ca15205e1b431f0d8af6ba423365fc7
|
[
"MIT"
] | 11
|
2019-04-02T18:38:11.000Z
|
2020-01-02T19:14:44.000Z
|
tests/test_identities_client.py
|
FPriv/factom-harmony-connect-python-sdk
|
b853596e7ca15205e1b431f0d8af6ba423365fc7
|
[
"MIT"
] | 3
|
2019-04-06T04:45:45.000Z
|
2019-08-03T12:26:17.000Z
|
from unittest import TestCase
from unittest.mock import patch
from factom_sdk.client.identities_client import IdentitiesClient
from factom_sdk.request_handler.request_handler import RequestHandler
class TestIdentityClient(TestCase):
def setUp(self):
self.identities_client = IdentitiesClient("https://apicast.io", "123456", "123456789")
    def tearDown(self):
        """Drop the client reference so each test starts from a clean slate."""
        self.identities_client = None
def test_init(self):
"""Check init identity client"""
self.assertTrue(isinstance(self.identities_client.request_handler, RequestHandler))
def test_create(self):
"""Check create identity"""
with self.assertRaises(Exception) as cm:
self.identities_client.create("")
self.assertTrue("at least 1 name is required." in str(cm.exception))
with self.assertRaises(Exception) as cm:
self.identities_client.create("123")
self.assertTrue("names must be an array." in str(cm.exception))
with self.assertRaises(Exception) as cm:
self.identities_client.create(["123"], keys=[])
self.assertTrue("at least 1 key is required." in str(cm.exception))
with self.assertRaises(Exception) as cm:
self.identities_client.create(["123"], keys="123")
self.assertTrue("keys must be an array." in str(cm.exception))
with self.assertRaises(Exception) as cm:
self.identities_client.create(["123"], keys=["idpub"], callback_stages="factom")
self.assertTrue("callback_stages must be an array." in str(cm.exception))
with self.assertRaises(Exception) as cm:
errors = [
{
"key": "123",
"error": "key is invalid",
},
{
"key": "123",
"error": "key is invalid",
},
]
self.identities_client.create(["123"], keys=[
"123",
"123",
"idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9"
])
self.assertTrue(str(errors) in str(cm.exception))
with self.assertRaises(Exception) as cm:
errors = [
{
"key": "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
"error": "key is duplicated, keys must be unique.",
},
]
self.identities_client.create(["123"], keys=[
"idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
"idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
"idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
])
self.assertTrue(str(errors) in str(cm.exception))
with self.assertRaises(Exception) as cm:
self.identities_client.create(["123"], keys=[
"idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
"idpub2FEZg6PwVuDXfsxEMinnqVfgjuNS2GzMSQwJgTdmUFQaoYpTnv",
"idpub1tkTRwxonwCfsvTkk5enWzbZgQSRpWDYtdzPUnq83AgQtecSgc",
], callback_url="callback.com",
callback_stages=["factom", "replicated"])
self.assertTrue("callback_url is an invalid url format." in str(cm.exception))
with self.assertRaises(Exception) as cm:
self.identities_client.create([
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already",
"The primary benefit of using Identities within your application the ability to verify that a certain user/device/organization/etc. actually signed and published a certain message that you see in your chain. Let is go through an example of how this creation of a signed entry works for an identity we made already", ],
callback_url="https://callback.com",
callback_stages=["factom", "replicated"]
)
self.assertTrue("Entry size 12771 must be less than 10240. Use less/shorter names or less keys."
in str(cm.exception))
with patch("factom_sdk.request_handler.request_handler.requests.request") as mock_post:
mock_post.return_value.ok = True
response = self.identities_client.create(
["123"],
callback_url="https://callback.com",
callback_stages=["factom", "replicated"]
)
self.assertIsNotNone(response)
def test_get(self):
    """get() must reject an empty identity_chain_id and return a response for a valid one.

    Uses assertIn instead of assertTrue("..." in str(...)) so a failing run
    reports the actual exception text instead of just "False is not true".
    """
    with self.assertRaises(Exception) as cm:
        self.identities_client.get("")
    self.assertIn("identity_chain_id is required.", str(cm.exception))
    # Happy path: stub out the HTTP layer so no network call is made.
    with patch("factom_sdk.request_handler.request_handler.requests.request") as mock_get:
        mock_get.return_value.ok = True
        response = self.identities_client.get("123")
        self.assertIsNotNone(response)
def test_key_get(self):
    """keys.get() must validate chain id and key format before hitting the API.

    assertIn replaces the assertTrue(x in y) anti-pattern for readable failures.
    """
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.get("", "")
    self.assertIn("identity_chain_id is required.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.get("123", "")
    self.assertIn("key is required.", str(cm.exception))
    # A bare "idpub" prefix is not a valid key checksum.
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.get("123", "idpub")
    self.assertIn("key is invalid.", str(cm.exception))
    # Happy path with a well-formed idpub key and a mocked HTTP layer.
    with patch("factom_sdk.request_handler.request_handler.requests.request") as mock_get:
        mock_get.return_value.ok = True
        response = self.identities_client.keys.get("123", "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9")
        self.assertIsNotNone(response)
def test_key_list(self):
    """keys.list() must validate chain id and integer paging params.

    assertIn replaces the assertTrue(x in y) anti-pattern for readable failures.
    """
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.list("")
    self.assertIn("identity_chain_id is required.", str(cm.exception))
    # limit is validated before offset, so a string limit fails first.
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.list("123", limit="123", offset="123")
    self.assertIn("limit must be an integer.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.list("123", limit=123, offset="123")
    self.assertIn("offset must be an integer.", str(cm.exception))
    # Happy path with valid integer paging and a mocked HTTP layer.
    with patch("factom_sdk.request_handler.request_handler.requests.request") as mock_get:
        mock_get.return_value.ok = True
        response = self.identities_client.keys.list("123", limit=123, offset=123)
        self.assertIsNotNone(response)
def test_key_replace(self):
    """keys.replace() must validate every argument in order, then succeed.

    Covers: required-argument checks, key-format checks, callback validation,
    and the mocked happy path. assertIn replaces assertTrue(x in y) so a
    failure shows the actual exception message.
    """
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("", "", "")
    self.assertIn("identity_chain_id is required.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123", "", "")
    self.assertIn("old_public_key is required.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123", "123", "")
    self.assertIn("signer_private_key is required.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123", "123", "123", new_public_key="")
    self.assertIn("new_public_key is required.", str(cm.exception))
    # Malformed idpub prefix: rejected before any signing is attempted.
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123", "idpub1", "idsec")
    self.assertIn("old_public_key is an invalid public key.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123",
                                            "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                            "idsec")
    self.assertIn("signer_private_key is invalid.", str(cm.exception))
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123",
                                            "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                            "idsec1Xbja4exmHFNgVSsk7VipNi4mwt6BjQFEZFCohs4Y7TzfhHoy6",
                                            new_public_key="idpub2")
    self.assertIn("new_public_key is an invalid public key.", str(cm.exception))
    # callback_stages must be a list, not a bare string.
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123",
                                            "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                            "idsec1Xbja4exmHFNgVSsk7VipNi4mwt6BjQFEZFCohs4Y7TzfhHoy6",
                                            new_public_key="idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                            callback_stages="factom")
    self.assertIn("callback_stages must be an array.", str(cm.exception))
    # callback_url must be a full URL, not a bare host.
    with self.assertRaises(Exception) as cm:
        self.identities_client.keys.replace("123",
                                            "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                            "idsec1Xbja4exmHFNgVSsk7VipNi4mwt6BjQFEZFCohs4Y7TzfhHoy6",
                                            new_public_key="idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                            callback_url="io.com", callback_stages=["factom"])
    self.assertIn("callback_url is an invalid url format.", str(cm.exception))
    # Happy path with valid keys and a mocked HTTP layer.
    with patch("factom_sdk.request_handler.request_handler.requests.request") as mock_post:
        mock_post.return_value.ok = True
        response = self.identities_client.keys.replace("123",
                                                       "idpub2TWHFrWrJxVEmbeXnMRWeKBdFp7bEByosS1phV1bH7NS99zHF9",
                                                       "idsec1Xbja4exmHFNgVSsk7VipNi4mwt6BjQFEZFCohs4Y7TzfhHoy6")
        self.assertIsNotNone(response)
| 95.438525
| 334
| 0.702881
| 3,108
| 23,287
| 5.227799
| 0.045689
| 0.039389
| 0.041851
| 0.046775
| 0.935438
| 0.927991
| 0.911681
| 0.895987
| 0.883986
| 0.881893
| 0
| 0.016632
| 0.240907
| 23,287
| 243
| 335
| 95.831276
| 0.902529
| 0.006742
| 0
| 0.610837
| 0
| 0.197044
| 0.648383
| 0.110611
| 0
| 0
| 0
| 0
| 0.275862
| 1
| 0.039409
| false
| 0
| 0.019704
| 0
| 0.064039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4069e613ff4184a8207e961a4cd574ab332ac3df
| 25,141
|
py
|
Python
|
sdk/python/pulumi_aws/connect/quick_connect.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/connect/quick_connect.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/connect/quick_connect.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['QuickConnectArgs', 'QuickConnect']
@pulumi.input_type
class QuickConnectArgs:
    """Tfgen-generated input bag for creating a QuickConnect resource.

    Required: ``instance_id`` and ``quick_connect_config``; everything else is
    optional. Values are stored/retrieved via ``pulumi.set``/``pulumi.get``
    rather than plain attributes, as required by ``@pulumi.input_type``.
    Generated file — do not hand-edit beyond documentation.
    """

    def __init__(__self__, *,
                 instance_id: pulumi.Input[str],
                 quick_connect_config: pulumi.Input['QuickConnectQuickConnectConfigArgs'],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a QuickConnect resource.
        :param pulumi.Input[str] instance_id: Specifies the identifier of the hosting Amazon Connect Instance.
        :param pulumi.Input['QuickConnectQuickConnectConfigArgs'] quick_connect_config: A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
        :param pulumi.Input[str] description: Specifies the description of the Quick Connect.
        :param pulumi.Input[str] name: Specifies the name of the Quick Connect.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        pulumi.set(__self__, "instance_id", instance_id)
        pulumi.set(__self__, "quick_connect_config", quick_connect_config)
        # Optional fields are only recorded when explicitly provided.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Input[str]:
        """
        Specifies the identifier of the hosting Amazon Connect Instance.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter(name="quickConnectConfig")
    def quick_connect_config(self) -> pulumi.Input['QuickConnectQuickConnectConfigArgs']:
        """
        A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
        """
        return pulumi.get(self, "quick_connect_config")

    @quick_connect_config.setter
    def quick_connect_config(self, value: pulumi.Input['QuickConnectQuickConnectConfigArgs']):
        pulumi.set(self, "quick_connect_config", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the description of the Quick Connect.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Quick Connect.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)
@pulumi.input_type
class _QuickConnectState:
    """Tfgen-generated state bag for looking up / filtering QuickConnect resources.

    Unlike ``QuickConnectArgs`` every field here is optional, and it adds the
    provider-computed outputs ``arn`` and ``quick_connect_id``. Values are
    stored/retrieved via ``pulumi.set``/``pulumi.get`` as required by
    ``@pulumi.input_type``. Generated file — do not hand-edit beyond documentation.
    """

    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 quick_connect_config: Optional[pulumi.Input['QuickConnectQuickConnectConfigArgs']] = None,
                 quick_connect_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering QuickConnect resources.
        :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the Quick Connect.
        :param pulumi.Input[str] description: Specifies the description of the Quick Connect.
        :param pulumi.Input[str] instance_id: Specifies the identifier of the hosting Amazon Connect Instance.
        :param pulumi.Input[str] name: Specifies the name of the Quick Connect.
        :param pulumi.Input['QuickConnectQuickConnectConfigArgs'] quick_connect_config: A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
        :param pulumi.Input[str] quick_connect_id: The identifier for the Quick Connect.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        # All fields are optional; only record the ones actually supplied.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if instance_id is not None:
            pulumi.set(__self__, "instance_id", instance_id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if quick_connect_config is not None:
            pulumi.set(__self__, "quick_connect_config", quick_connect_config)
        if quick_connect_id is not None:
            pulumi.set(__self__, "quick_connect_id", quick_connect_id)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The Amazon Resource Name (ARN) of the Quick Connect.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the description of the Quick Connect.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the identifier of the hosting Amazon Connect Instance.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Quick Connect.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="quickConnectConfig")
    def quick_connect_config(self) -> Optional[pulumi.Input['QuickConnectQuickConnectConfigArgs']]:
        """
        A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
        """
        return pulumi.get(self, "quick_connect_config")

    @quick_connect_config.setter
    def quick_connect_config(self, value: Optional[pulumi.Input['QuickConnectQuickConnectConfigArgs']]):
        pulumi.set(self, "quick_connect_config", value)

    @property
    @pulumi.getter(name="quickConnectId")
    def quick_connect_id(self) -> Optional[pulumi.Input[str]]:
        """
        The identifier for the Quick Connect.
        """
        return pulumi.get(self, "quick_connect_id")

    @quick_connect_id.setter
    def quick_connect_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "quick_connect_id", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)
class QuickConnect(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
quick_connect_config: Optional[pulumi.Input[pulumi.InputType['QuickConnectQuickConnectConfigArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Provides an Amazon Connect Quick Connect resource. For more information see
[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
## Example Usage
```python
import pulumi
import pulumi_aws as aws
test = aws.connect.QuickConnect("test",
description="quick connect phone number",
instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
quick_connect_config=aws.connect.QuickConnectQuickConnectConfigArgs(
phone_configs=[aws.connect.QuickConnectQuickConnectConfigPhoneConfigArgs(
phone_number="+12345678912",
)],
quick_connect_type="PHONE_NUMBER",
),
tags={
"Name": "Example Quick Connect",
})
```
## Import
Amazon Connect Quick Connects can be imported using the `instance_id` and `quick_connect_id` separated by a colon (`:`), e.g.,
```sh
$ pulumi import aws:connect/quickConnect:QuickConnect example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Specifies the description of the Quick Connect.
:param pulumi.Input[str] instance_id: Specifies the identifier of the hosting Amazon Connect Instance.
:param pulumi.Input[str] name: Specifies the name of the Quick Connect.
:param pulumi.Input[pulumi.InputType['QuickConnectQuickConnectConfigArgs']] quick_connect_config: A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: QuickConnectArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides an Amazon Connect Quick Connect resource. For more information see
[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
## Example Usage
```python
import pulumi
import pulumi_aws as aws
test = aws.connect.QuickConnect("test",
description="quick connect phone number",
instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
quick_connect_config=aws.connect.QuickConnectQuickConnectConfigArgs(
phone_configs=[aws.connect.QuickConnectQuickConnectConfigPhoneConfigArgs(
phone_number="+12345678912",
)],
quick_connect_type="PHONE_NUMBER",
),
tags={
"Name": "Example Quick Connect",
})
```
## Import
Amazon Connect Quick Connects can be imported using the `instance_id` and `quick_connect_id` separated by a colon (`:`), e.g.,
```sh
$ pulumi import aws:connect/quickConnect:QuickConnect example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5
```
:param str resource_name: The name of the resource.
:param QuickConnectArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(QuickConnectArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
quick_connect_config: Optional[pulumi.Input[pulumi.InputType['QuickConnectQuickConnectConfigArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = QuickConnectArgs.__new__(QuickConnectArgs)
__props__.__dict__["description"] = description
if instance_id is None and not opts.urn:
raise TypeError("Missing required property 'instance_id'")
__props__.__dict__["instance_id"] = instance_id
__props__.__dict__["name"] = name
if quick_connect_config is None and not opts.urn:
raise TypeError("Missing required property 'quick_connect_config'")
__props__.__dict__["quick_connect_config"] = quick_connect_config
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
__props__.__dict__["arn"] = None
__props__.__dict__["quick_connect_id"] = None
super(QuickConnect, __self__).__init__(
'aws:connect/quickConnect:QuickConnect',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
quick_connect_config: Optional[pulumi.Input[pulumi.InputType['QuickConnectQuickConnectConfigArgs']]] = None,
quick_connect_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'QuickConnect':
"""
Get an existing QuickConnect resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the Quick Connect.
:param pulumi.Input[str] description: Specifies the description of the Quick Connect.
:param pulumi.Input[str] instance_id: Specifies the identifier of the hosting Amazon Connect Instance.
:param pulumi.Input[str] name: Specifies the name of the Quick Connect.
:param pulumi.Input[pulumi.InputType['QuickConnectQuickConnectConfigArgs']] quick_connect_config: A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
:param pulumi.Input[str] quick_connect_id: The identifier for the Quick Connect.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _QuickConnectState.__new__(_QuickConnectState)
__props__.__dict__["arn"] = arn
__props__.__dict__["description"] = description
__props__.__dict__["instance_id"] = instance_id
__props__.__dict__["name"] = name
__props__.__dict__["quick_connect_config"] = quick_connect_config
__props__.__dict__["quick_connect_id"] = quick_connect_id
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
return QuickConnect(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        The Amazon Resource Name (ARN) of the Quick Connect.
        """
        # Read-only output resolved from provider-managed resource state.
        return pulumi.get(self, "arn")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the description of the Quick Connect.
        """
        # Optional: unresolved when no description was configured.
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Output[str]:
        """
        Specifies the identifier of the hosting Amazon Connect Instance.
        """
        # Maps the camelCase provider key "instanceId" to this snake_case accessor.
        return pulumi.get(self, "instance_id")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Quick Connect.
        """
        # Read-only output resolved from provider-managed resource state.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="quickConnectConfig")
    def quick_connect_config(self) -> pulumi.Output['outputs.QuickConnectQuickConnectConfig']:
        """
        A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below.
        """
        # Maps the camelCase provider key "quickConnectConfig" to this accessor.
        return pulumi.get(self, "quick_connect_config")
    @property
    @pulumi.getter(name="quickConnectId")
    def quick_connect_id(self) -> pulumi.Output[str]:
        """
        The identifier for the Quick Connect.
        """
        # Maps the camelCase provider key "quickConnectId" to this accessor.
        return pulumi.get(self, "quick_connect_id")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        """
        # Optional: only the tags set directly on this resource (see tags_all for the merged view).
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        # Merged view: resource-level tags plus provider-level default_tags.
        return pulumi.get(self, "tags_all")
| 50.181637
| 343
| 0.6703
| 2,981
| 25,141
| 5.459242
| 0.072123
| 0.081111
| 0.067961
| 0.043259
| 0.881467
| 0.866536
| 0.845582
| 0.82936
| 0.823584
| 0.81621
| 0
| 0.005999
| 0.224215
| 25,141
| 500
| 344
| 50.282
| 0.828394
| 0.426435
| 0
| 0.69145
| 1
| 0
| 0.109207
| 0.030691
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159851
| false
| 0.003717
| 0.026022
| 0
| 0.282528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4093be00ff2db1d4d433d5a385a5a9a741d68129
| 4,006
|
py
|
Python
|
low_level_simulation/build/rosbridge_suite/rosapi/cmake/rosapi-genmsg-context.py
|
abiantorres/autonomous-vehicles-system-simulation
|
3f0112036b2b270f5055729c648a1310976df933
|
[
"Apache-2.0"
] | null | null | null |
low_level_simulation/build/rosbridge_suite/rosapi/cmake/rosapi-genmsg-context.py
|
abiantorres/autonomous-vehicles-system-simulation
|
3f0112036b2b270f5055729c648a1310976df933
|
[
"Apache-2.0"
] | null | null | null |
low_level_simulation/build/rosbridge_suite/rosapi/cmake/rosapi-genmsg-context.py
|
abiantorres/autonomous-vehicles-system-simulation
|
3f0112036b2b270f5055729c648a1310976df933
|
[
"Apache-2.0"
] | null | null | null |
# generated from genmsg/cmake/pkg-genmsg.context.in
# CMake-generated genmsg context for the ROS package 'rosapi': it enumerates
# the package's .msg/.srv sources and the code-generation settings that
# pkg-genmsg.cmake consumes when producing language bindings.
# NOTE(review): auto-generated at build time — do not edit by hand; paths are
# absolute to the machine that ran the build.
# Semicolon-separated absolute paths of the message (.msg) definitions.
messages_str = "/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/msg/TypeDef.msg"
# Semicolon-separated absolute paths of the service (.srv) definitions.
services_str = "/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/DeleteParam.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/GetActionServers.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/GetParam.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/GetParamNames.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/GetTime.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/HasParam.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/MessageDetails.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/Nodes.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/NodeDetails.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/Publishers.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/SearchParam.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServiceHost.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServiceNode.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServiceProviders.srv;/home/abiantorres/Documentos/tfg/autonom
ous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServiceRequestDetails.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServiceResponseDetails.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/Services.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServicesForType.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/ServiceType.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/SetParam.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/Subscribers.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/Topics.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/TopicsForType.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/srv/TopicType.srv"
# Name of the ROS package these definitions belong to.
pkg_name = "rosapi"
# Message-package dependencies (none for rosapi).
dependencies_str = ""
# Target generator languages for the produced bindings.
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
# Include search paths for resolving message dependencies ("<pkg>;<msg dir>").
dep_include_paths_str = "rosapi;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosapi/msg"
# Interpreter used to run the generator scripts.
PYTHON_EXECUTABLE = "/usr/bin/python"
# Whether the package ships static (non-generated) sources.
package_has_static_sources = 'TRUE' == 'TRUE'
# Script used to verify message dependency closure at build time.
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| 333.833333
| 3,364
| 0.872941
| 521
| 4,006
| 6.53167
| 0.151631
| 0.114605
| 0.191008
| 0.213929
| 0.831913
| 0.831913
| 0.831913
| 0.831913
| 0.831913
| 0.831913
| 0
| 0
| 0.008487
| 4,006
| 11
| 3,365
| 364.181818
| 0.856747
| 0.012232
| 0
| 0
| 1
| 0.333333
| 0.948673
| 0.94134
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
40aa2c1a35766e58f9fb4a69516a6975a3b3464b
| 145
|
py
|
Python
|
web_commands/german.py
|
SalamiArmy/InfoBoet
|
71df751e50b4b458db44444ef7d6dfe6f68e648f
|
[
"Apache-2.0"
] | 1
|
2021-07-14T21:48:48.000Z
|
2021-07-14T21:48:48.000Z
|
web_commands/german.py
|
SalamiArmy/InfoBoet
|
71df751e50b4b458db44444ef7d6dfe6f68e648f
|
[
"Apache-2.0"
] | 6
|
2017-11-27T06:04:34.000Z
|
2020-02-19T05:15:02.000Z
|
web_commands/german.py
|
SalamiArmy/InfoBoet
|
71df751e50b4b458db44444ef7d6dfe6f68e648f
|
[
"Apache-2.0"
] | 2
|
2017-08-16T20:26:02.000Z
|
2020-11-10T18:44:11.000Z
|
import telegram_commands.getgerman as getgerman
def run(keyConfig, message, totalResults=1):
    """Web entry point for the 'german' command.

    Thin adapter: forwards keyConfig, message and totalResults unchanged to
    telegram_commands.getgerman.run and discards its result (returns None).
    """
    forwarded = (keyConfig, message, totalResults)
    getgerman.run(*forwarded)
| 36.25
| 51
| 0.813793
| 17
| 145
| 6.882353
| 0.647059
| 0.205128
| 0.324786
| 0.529915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007692
| 0.103448
| 145
| 3
| 52
| 48.333333
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dc16b82124973e172daea01385ba5cda637046ca
| 2,480
|
py
|
Python
|
entertainment.py
|
SangramChavan/Movie-website-with-Python
|
c1e593ca017bbc359e0de14be632454bf96f5be1
|
[
"MIT"
] | null | null | null |
entertainment.py
|
SangramChavan/Movie-website-with-Python
|
c1e593ca017bbc359e0de14be632454bf96f5be1
|
[
"MIT"
] | null | null | null |
entertainment.py
|
SangramChavan/Movie-website-with-Python
|
c1e593ca017bbc359e0de14be632454bf96f5be1
|
[
"MIT"
] | null | null | null |
import media
import fresh_tomatoes


def _make_chakras_movie():
    """Return a fresh media.Movie for the '7 Chakras' placeholder entry.

    All six page entries originally repeated this exact constructor call;
    building them through one helper removes the duplication.
    """
    return media.Movie(
        "7 Chakras",
        "How To Open Your 7 Chakras Explained",
        "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM3MTc3OTc0NF5BMl5BanBnXkFtZTcwOTQ0OTM1MQ@@._V1._CR34,0,295,440_UX182_CR0,0,182,268_AL__QL50.jpg",
        "https://www.youtube.com/watch?v=StrbppmsZJw",
    )


# Keep the individual module-level names so any existing importer of
# chakras..chakras5 continues to work.
chakras, chakras1, chakras2, chakras3, chakras4, chakras5 = (
    _make_chakras_movie() for _ in range(6)
)

movies = [chakras, chakras1, chakras2, chakras3, chakras4, chakras5]
# Renders the static movie page and opens it in the default browser.
fresh_tomatoes.open_movies_page(movies)
| 49.6
| 175
| 0.620565
| 285
| 2,480
| 5.235088
| 0.168421
| 0.064343
| 0.044236
| 0.072386
| 0.83378
| 0.83378
| 0.83378
| 0.83378
| 0.83378
| 0.83378
| 0
| 0.114674
| 0.258065
| 2,480
| 49
| 176
| 50.612245
| 0.696196
| 0.04879
| 0
| 0.529412
| 0
| 0.176471
| 0.607401
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc16b9ff43dbaf66bbb44370ee0c5864a55aba86
| 16,605
|
py
|
Python
|
src/conductor/client/http/api/queue_admin_resource_api.py
|
conductor-sdk/conductor-python
|
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
|
[
"Apache-2.0"
] | 3
|
2022-03-10T18:24:46.000Z
|
2022-03-22T20:49:30.000Z
|
src/conductor/client/http/api/queue_admin_resource_api.py
|
conductor-sdk/conductor-python
|
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
|
[
"Apache-2.0"
] | 6
|
2022-03-08T17:48:28.000Z
|
2022-03-30T00:39:22.000Z
|
src/conductor/client/http/api/queue_admin_resource_api.py
|
conductor-sdk/conductor-python
|
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from conductor.client.http.api_client import ApiClient
class QueueAdminResourceApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def names(self, **kwargs): # noqa: E501
        """Get Queue Names # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.names(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: dict(str, str)
        If the method is called asynchronously,
        returns the request thread.
        """
        # Public wrapper: always unwrap the body and delegate to the
        # *_with_http_info variant that does the actual call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.names_with_http_info(**kwargs) # noqa: E501
        else:
            (data) = self.names_with_http_info(**kwargs) # noqa: E501
            return data
    def names_with_http_info(self, **kwargs): # noqa: E501
        """Get Queue Names # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.names_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: dict(str, str)
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = [] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method names" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*']) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            '/api/queue/', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='dict(str, str)', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def size1(self, **kwargs): # noqa: E501
        """Get the queue length # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.size1(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: dict(str, int)
        If the method is called asynchronously,
        returns the request thread.
        """
        # Public wrapper: always unwrap the body and delegate to the
        # *_with_http_info variant that does the actual call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.size1_with_http_info(**kwargs) # noqa: E501
        else:
            (data) = self.size1_with_http_info(**kwargs) # noqa: E501
            return data
    def size1_with_http_info(self, **kwargs): # noqa: E501
        """Get the queue length # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.size1_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: dict(str, int)
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = [] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method size1" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*']) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            '/api/queue/size', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='dict(str, int)', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def update1(self, body, workflow_id, task_ref_name, status, **kwargs): # noqa: E501
        """Publish a message in queue to mark a wait task as completed. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update1(body, workflow_id, task_ref_name, status, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param dict(str, object) body: (required)
        :param str workflow_id: (required)
        :param str task_ref_name: (required)
        :param str status: (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Public wrapper: always unwrap the body and delegate to the
        # *_with_http_info variant that does the actual call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update1_with_http_info(body, workflow_id, task_ref_name, status, **kwargs) # noqa: E501
        else:
            (data) = self.update1_with_http_info(body, workflow_id, task_ref_name, status, **kwargs) # noqa: E501
            return data
    def update1_with_http_info(self, body, workflow_id, task_ref_name, status, **kwargs): # noqa: E501
        """Publish a message in queue to mark a wait task as completed. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update1_with_http_info(body, workflow_id, task_ref_name, status, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param dict(str, object) body: (required)
        :param str workflow_id: (required)
        :param str task_ref_name: (required)
        :param str status: (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['body', 'workflow_id', 'task_ref_name', 'status'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update1" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update1`") # noqa: E501
        # verify the required parameter 'workflow_id' is set
        if ('workflow_id' not in params or
                params['workflow_id'] is None):
            raise ValueError("Missing the required parameter `workflow_id` when calling `update1`") # noqa: E501
        # verify the required parameter 'task_ref_name' is set
        if ('task_ref_name' not in params or
                params['task_ref_name'] is None):
            raise ValueError("Missing the required parameter `task_ref_name` when calling `update1`") # noqa: E501
        # verify the required parameter 'status' is set
        if ('status' not in params or
                params['status'] is None):
            raise ValueError("Missing the required parameter `status` when calling `update1`") # noqa: E501
        collection_formats = {}
        # Substitute the path template placeholders from the snake_case params.
        path_params = {}
        if 'workflow_id' in params:
            path_params['workflowId'] = params['workflow_id'] # noqa: E501
        if 'task_ref_name' in params:
            path_params['taskRefName'] = params['task_ref_name'] # noqa: E501
        if 'status' in params:
            path_params['status'] = params['status'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            '/api/queue/update/{workflowId}/{taskRefName}/{status}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def update_by_task_id(self, body, workflow_id, task_id, status, **kwargs): # noqa: E501
        """Publish a message in queue to mark a wait task (by taskId) as completed. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_by_task_id(body, workflow_id, task_id, status, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param dict(str, object) body: (required)
        :param str workflow_id: (required)
        :param str task_id: (required)
        :param str status: (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Public wrapper: always unwrap the body and delegate to the
        # *_with_http_info variant that does the actual call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update_by_task_id_with_http_info(body, workflow_id, task_id, status, **kwargs) # noqa: E501
        else:
            (data) = self.update_by_task_id_with_http_info(body, workflow_id, task_id, status, **kwargs) # noqa: E501
            return data
    def update_by_task_id_with_http_info(self, body, workflow_id, task_id, status, **kwargs): # noqa: E501
        """Publish a message in queue to mark a wait task (by taskId) as completed. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_by_task_id_with_http_info(body, workflow_id, task_id, status, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param dict(str, object) body: (required)
        :param str workflow_id: (required)
        :param str task_id: (required)
        :param str status: (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['body', 'workflow_id', 'task_id', 'status'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_by_task_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_by_task_id`") # noqa: E501
        # verify the required parameter 'workflow_id' is set
        if ('workflow_id' not in params or
                params['workflow_id'] is None):
            raise ValueError("Missing the required parameter `workflow_id` when calling `update_by_task_id`") # noqa: E501
        # verify the required parameter 'task_id' is set
        if ('task_id' not in params or
                params['task_id'] is None):
            raise ValueError("Missing the required parameter `task_id` when calling `update_by_task_id`") # noqa: E501
        # verify the required parameter 'status' is set
        if ('status' not in params or
                params['status'] is None):
            raise ValueError("Missing the required parameter `status` when calling `update_by_task_id`") # noqa: E501
        collection_formats = {}
        # Substitute the path template placeholders from the snake_case params.
        path_params = {}
        if 'workflow_id' in params:
            path_params['workflowId'] = params['workflow_id'] # noqa: E501
        if 'task_id' in params:
            path_params['taskId'] = params['task_id'] # noqa: E501
        if 'status' in params:
            path_params['status'] = params['status'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            '/api/queue/update/{workflowId}/task/{taskId}/{status}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 38.978873
| 123
| 0.598796
| 1,958
| 16,605
| 4.838611
| 0.078141
| 0.047287
| 0.023644
| 0.030399
| 0.938358
| 0.933819
| 0.927064
| 0.919569
| 0.910914
| 0.876504
| 0
| 0.01658
| 0.306233
| 16,605
| 425
| 124
| 39.070588
| 0.805816
| 0.307257
| 0
| 0.759494
| 0
| 0
| 0.193775
| 0.034686
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037975
| false
| 0
| 0.016878
| 0
| 0.109705
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
906f8c24347b215df81f56f9171f39e738749d0e
| 126,018
|
py
|
Python
|
symphony/bdk/gen/agent_api/dlp_policies_and_dictionary_management_api.py
|
symphony-mariacristina/symphony-bdk-python
|
ef65762739890e826ccfaf38f7a41d61b95e7f22
|
[
"Apache-2.0"
] | null | null | null |
symphony/bdk/gen/agent_api/dlp_policies_and_dictionary_management_api.py
|
symphony-mariacristina/symphony-bdk-python
|
ef65762739890e826ccfaf38f7a41d61b95e7f22
|
[
"Apache-2.0"
] | null | null | null |
symphony/bdk/gen/agent_api/dlp_policies_and_dictionary_management_api.py
|
symphony-mariacristina/symphony-bdk-python
|
ef65762739890e826ccfaf38f7a41d61b95e7f22
|
[
"Apache-2.0"
] | null | null | null |
"""
Agent API
This document refers to Symphony API calls to send and receive messages and content. They need the on-premise Agent installed to perform decryption/encryption of content. - sessionToken and keyManagerToken can be obtained by calling the authenticationAPI on the symphony back end and the key manager respectively. Refer to the methods described in authenticatorAPI.yaml. - A new authorizationToken has been introduced in the authenticationAPI response payload. It can be used to replace the sessionToken in any of the API calls and can be passed as \"Authorization\" header. - Actions are defined to be atomic, ie will succeed in their entirety or fail and have changed nothing. - If it returns a 40X status then it will have sent no message to any stream even if a request to some subset of the requested streams would have succeeded. - If this contract cannot be met for any reason then this is an error and the response code will be 50X. - MessageML is a markup language for messages. See reference here: https://rest-api.symphony.com/docs/messagemlv2 - **Real Time Events**: The following events are returned when reading from a real time messages and events stream (\"datafeed\"). These events will be returned for datafeeds created with the v5 endpoints. To know more about the endpoints, refer to Create Messages/Events Stream and Read Messages/Events Stream. Unless otherwise specified, all events were added in 1.46. # noqa: E501
The version of the OpenAPI document: 20.14.0-SNAPSHOT
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from symphony.bdk.gen.api_client import ApiClient, Endpoint as _Endpoint
from symphony.bdk.gen.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from symphony.bdk.gen.agent_model.error import Error
from symphony.bdk.gen.agent_model.v1_dlp_dictionary_metadata_collection_response import V1DLPDictionaryMetadataCollectionResponse
from symphony.bdk.gen.agent_model.v1_dlp_dictionary_metadata_create_request import V1DLPDictionaryMetadataCreateRequest
from symphony.bdk.gen.agent_model.v1_dlp_dictionary_metadata_response import V1DLPDictionaryMetadataResponse
from symphony.bdk.gen.agent_model.v1_dlp_dictionary_metadata_update_request import V1DLPDictionaryMetadataUpdateRequest
from symphony.bdk.gen.agent_model.v1_dlp_policies_collection_response import V1DLPPoliciesCollectionResponse
from symphony.bdk.gen.agent_model.v1_dlp_policy_request import V1DLPPolicyRequest
from symphony.bdk.gen.agent_model.v1_dlp_policy_response import V1DLPPolicyResponse
from symphony.bdk.gen.agent_model.v3_dlp_policies_collection_response import V3DLPPoliciesCollectionResponse
from symphony.bdk.gen.agent_model.v3_dlp_policy_request import V3DLPPolicyRequest
from symphony.bdk.gen.agent_model.v3_dlp_policy_response import V3DLPPolicyResponse
class DLPPoliciesAndDictionaryManagementApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the DLP policies/dictionaries API facade.

    Pre-builds one ``_Endpoint`` descriptor per REST operation; the public
    ``v1_*``/``v3_*`` methods of this class delegate to those descriptors.

    Args:
        api_client (ApiClient, optional): client used to perform the HTTP
            calls. A default ``ApiClient`` is instantiated when omitted.
    """
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client

    # NOTE(review): this class is emitted by openapi-generator, and its
    # endpoint configurations are entirely regular: only the response type,
    # path, operation id, HTTP method, parameter list, body model and the
    # accept/content-type headers vary between operations. The tables below
    # capture the invariant per-parameter metadata, and _endpoint() expands
    # each operation into an _Endpoint config identical to the generator's
    # fully spelled-out form.

    # REST (camelCase) wire name of each python (snake_case) parameter.
    # 'body' is intentionally absent: body parameters carry no attribute
    # mapping in the generated configs.
    rest_names = {
        'session_token': 'sessionToken',
        'key_manager_token': 'keyManagerToken',
        'dict_id': 'dictId',
        'dict_version': 'dictVersion',
        'policy_id': 'policyId',
        'policy_version': 'policyVersion',
        'page': 'page',
        'limit': 'limit',
        'data': 'data',
    }
    # Where each parameter travels in the HTTP request.
    locations = {
        'session_token': 'header',
        'key_manager_token': 'header',
        'dict_id': 'path',
        'dict_version': 'query',
        'policy_id': 'path',
        'policy_version': 'query',
        'page': 'query',
        'limit': 'query',
        'data': 'form',
        'body': 'body',
    }
    # Declared python type(s) of each non-body parameter.
    param_types = {
        'session_token': (str,),
        'key_manager_token': (str,),
        'dict_id': (str,),
        'dict_version': (str,),
        'policy_id': (str,),
        'policy_version': (str,),
        'page': (int,),
        'limit': (int,),
        'data': (file_type,),
    }
    # Parameters callers may omit; everything else is required.
    optional_params = frozenset(
        ('dict_version', 'policy_version', 'page', 'limit'))

    def _endpoint(response_cls, path, operation_id, http_method, params,
                  accept, content_type=(), body_type=None):
        """Assemble an _Endpoint from the fields that vary per operation.

        ``params`` is the ordered tuple of python parameter names;
        ``body_type`` is the request-body model class when ``'body'`` is
        among the parameters.
        """
        return _Endpoint(
            settings={
                'response_type': (response_cls,),
                'auth': [],
                'endpoint_path': path,
                'operation_id': operation_id,
                'http_method': http_method,
                'servers': None,
            },
            params_map={
                'all': list(params),
                'required': [p for p in params if p not in optional_params],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    p: (body_type,) if p == 'body' else param_types[p]
                    for p in params
                },
                'attribute_map': {
                    p: rest_names[p] for p in params if p != 'body'
                },
                'location_map': {p: locations[p] for p in params},
                'collection_format_map': {},
            },
            headers_map={
                'accept': list(accept),
                'content_type': list(content_type),
            },
            api_client=api_client,
        )

    self.v1_dlp_dictionaries_dict_id_data_download_get_endpoint = _endpoint(
        str,
        '/v1/dlp/dictionaries/{dictId}/data/download',
        'v1_dlp_dictionaries_dict_id_data_download_get',
        'GET',
        ('session_token', 'key_manager_token', 'dict_id', 'dict_version'),
        accept=('application/octet-stream',),
    )
    self.v1_dlp_dictionaries_dict_id_data_upload_post_endpoint = _endpoint(
        V1DLPDictionaryMetadataResponse,
        '/v1/dlp/dictionaries/{dictId}/data/upload',
        'v1_dlp_dictionaries_dict_id_data_upload_post',
        'POST',
        ('session_token', 'key_manager_token', 'dict_id', 'data'),
        accept=('*/*',),
        content_type=('multipart/form-data',),
    )
    self.v1_dlp_dictionaries_dict_id_delete_endpoint = _endpoint(
        V1DLPDictionaryMetadataResponse,
        '/v1/dlp/dictionaries/{dictId}',
        'v1_dlp_dictionaries_dict_id_delete',
        'DELETE',
        ('session_token', 'key_manager_token', 'dict_id'),
        accept=('*/*',),
    )
    self.v1_dlp_dictionaries_dict_id_get_endpoint = _endpoint(
        V1DLPDictionaryMetadataResponse,
        '/v1/dlp/dictionaries/{dictId}',
        'v1_dlp_dictionaries_dict_id_get',
        'GET',
        ('session_token', 'key_manager_token', 'dict_id', 'dict_version'),
        accept=('*/*',),
    )
    self.v1_dlp_dictionaries_dict_id_put_endpoint = _endpoint(
        V1DLPDictionaryMetadataResponse,
        '/v1/dlp/dictionaries/{dictId}',
        'v1_dlp_dictionaries_dict_id_put',
        'PUT',
        ('session_token', 'key_manager_token', 'dict_id', 'body'),
        accept=('*/*',),
        content_type=('application/json',),
        body_type=V1DLPDictionaryMetadataUpdateRequest,
    )
    self.v1_dlp_dictionaries_get_endpoint = _endpoint(
        V1DLPDictionaryMetadataCollectionResponse,
        '/v1/dlp/dictionaries',
        'v1_dlp_dictionaries_get',
        'GET',
        ('session_token', 'key_manager_token', 'page', 'limit'),
        accept=('*/*',),
    )
    self.v1_dlp_dictionaries_post_endpoint = _endpoint(
        V1DLPDictionaryMetadataResponse,
        '/v1/dlp/dictionaries',
        'v1_dlp_dictionaries_post',
        'POST',
        ('session_token', 'key_manager_token', 'body'),
        accept=('*/*',),
        content_type=('application/json',),
        body_type=V1DLPDictionaryMetadataCreateRequest,
    )
    self.v1_dlp_policies_get_endpoint = _endpoint(
        V1DLPPoliciesCollectionResponse,
        '/v1/dlp/policies',
        'v1_dlp_policies_get',
        'GET',
        ('session_token', 'key_manager_token', 'page', 'limit'),
        accept=('*/*',),
    )
    self.v1_dlp_policies_policy_id_delete_endpoint = _endpoint(
        V1DLPPolicyResponse,
        '/v1/dlp/policies/{policyId}',
        'v1_dlp_policies_policy_id_delete',
        'DELETE',
        ('session_token', 'key_manager_token', 'policy_id'),
        accept=('*/*',),
    )
    self.v1_dlp_policies_policy_id_disable_post_endpoint = _endpoint(
        V1DLPPolicyResponse,
        '/v1/dlp/policies/{policyId}/disable',
        'v1_dlp_policies_policy_id_disable_post',
        'POST',
        ('session_token', 'key_manager_token', 'policy_id'),
        accept=('*/*',),
    )
    self.v1_dlp_policies_policy_id_enable_post_endpoint = _endpoint(
        V1DLPPolicyResponse,
        '/v1/dlp/policies/{policyId}/enable',
        'v1_dlp_policies_policy_id_enable_post',
        'POST',
        ('session_token', 'key_manager_token', 'policy_id'),
        accept=('*/*',),
    )
    self.v1_dlp_policies_policy_id_get_endpoint = _endpoint(
        V1DLPPolicyResponse,
        '/v1/dlp/policies/{policyId}',
        'v1_dlp_policies_policy_id_get',
        'GET',
        ('session_token', 'key_manager_token', 'policy_id',
         'policy_version'),
        accept=('*/*',),
    )
    self.v1_dlp_policies_policy_id_put_endpoint = _endpoint(
        V1DLPPolicyResponse,
        '/v1/dlp/policies/{policyId}',
        'v1_dlp_policies_policy_id_put',
        'PUT',
        ('session_token', 'key_manager_token', 'policy_id', 'body'),
        accept=('*/*',),
        content_type=('application/json',),
        body_type=V1DLPPolicyRequest,
    )
    self.v1_dlp_policies_post_endpoint = _endpoint(
        V1DLPPolicyResponse,
        '/v1/dlp/policies',
        'v1_dlp_policies_post',
        'POST',
        ('session_token', 'key_manager_token', 'body'),
        accept=('application/json',),
        content_type=('application/json',),
        body_type=V1DLPPolicyRequest,
    )
    self.v3_dlp_policies_get_endpoint = _endpoint(
        V3DLPPoliciesCollectionResponse,
        '/v3/dlp/policies',
        'v3_dlp_policies_get',
        'GET',
        ('session_token', 'key_manager_token', 'page', 'limit'),
        accept=('*/*',),
    )
    self.v3_dlp_policies_policy_id_delete_post_endpoint = _endpoint(
        V3DLPPolicyResponse,
        '/v3/dlp/policies/{policyId}/delete',
        'v3_dlp_policies_policy_id_delete_post',
        'POST',
        ('session_token', 'key_manager_token', 'policy_id'),
        accept=('*/*',),
    )
    self.v3_dlp_policies_policy_id_disable_post_endpoint = _endpoint(
        V3DLPPolicyResponse,
        '/v3/dlp/policies/{policyId}/disable',
        'v3_dlp_policies_policy_id_disable_post',
        'POST',
        ('session_token', 'key_manager_token', 'policy_id'),
        accept=('*/*',),
    )
    self.v3_dlp_policies_policy_id_enable_post_endpoint = _endpoint(
        V3DLPPolicyResponse,
        '/v3/dlp/policies/{policyId}/enable',
        'v3_dlp_policies_policy_id_enable_post',
        'POST',
        ('session_token', 'key_manager_token', 'policy_id'),
        accept=('*/*',),
    )
    self.v3_dlp_policies_policy_id_get_endpoint = _endpoint(
        V3DLPPolicyResponse,
        '/v3/dlp/policies/{policyId}',
        'v3_dlp_policies_policy_id_get',
        'GET',
        ('session_token', 'key_manager_token', 'policy_id',
         'policy_version'),
        accept=('*/*',),
    )
    self.v3_dlp_policies_policy_id_update_post_endpoint = _endpoint(
        V3DLPPolicyResponse,
        '/v3/dlp/policies/{policyId}/update',
        'v3_dlp_policies_policy_id_update_post',
        'POST',
        ('session_token', 'key_manager_token', 'policy_id', 'body'),
        accept=('*/*',),
        content_type=('application/json',),
        body_type=V3DLPPolicyRequest,
    )
    self.v3_dlp_policies_post_endpoint = _endpoint(
        V3DLPPolicyResponse,
        '/v3/dlp/policies',
        'v3_dlp_policies_post',
        'POST',
        ('session_token', 'key_manager_token', 'body'),
        accept=('application/json',),
        content_type=('application/json',),
        body_type=V3DLPPolicyRequest,
    )
def v1_dlp_dictionaries_dict_id_data_download_get(
    self,
    session_token,
    key_manager_token,
    dict_id,
    **kwargs
):
    """Download the Base64-encoded content of a dictionary.

    The request is synchronous by default; pass ``async_req=True`` to get
    back the request thread instead of the decoded result.

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        dict_id (str): Unique dictionary identifier.

    Keyword Args:
        dict_version (str): If a valid dictionary version number, return the
            dictionary at that version; otherwise the latest. [optional]
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the payload, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the input uses the serialized
            (OpenAPI) names; False for pythonic snake_case names (default).
        _content_type (str/None): force the body content-type; when None it
            is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in every framework option the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional arguments are forwarded to the endpoint as keywords.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['dict_id'] = dict_id
    return self.v1_dlp_dictionaries_dict_id_data_download_get_endpoint.call_with_http_info(**kwargs)
def v1_dlp_dictionaries_dict_id_data_upload_post(
    self,
    session_token,
    key_manager_token,
    dict_id,
    data,
    **kwargs
):
    """Override dictionary content with the provided content.

    The request is synchronous by default; pass ``async_req=True`` to get
    back the request thread instead of the response model.

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        dict_id (str): Unique dictionary identifier.
        data (file_type): dictionary content to upload (multipart form).

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the payload, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the input uses the serialized
            (OpenAPI) names; False for pythonic snake_case names (default).
        _content_type (str/None): force the body content-type; when None it
            is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.

    Returns:
        V1DLPDictionaryMetadataResponse, or the request thread when called
        asynchronously.
    """
    # Fill in every framework option the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional arguments are forwarded to the endpoint as keywords.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['dict_id'] = dict_id
    kwargs['data'] = data
    return self.v1_dlp_dictionaries_dict_id_data_upload_post_endpoint.call_with_http_info(**kwargs)
def v1_dlp_dictionaries_dict_id_delete(
    self,
    session_token,
    key_manager_token,
    dict_id,
    **kwargs
):
    """Delete a dictionary.

    Note: all policies referencing the dictionary will be affected. The
    request is synchronous by default; pass ``async_req=True`` to get back
    the request thread instead of the response model.

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        dict_id (str): Unique dictionary identifier.

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the payload, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the input uses the serialized
            (OpenAPI) names; False for pythonic snake_case names (default).
        _content_type (str/None): force the body content-type; when None it
            is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.

    Returns:
        V1DLPDictionaryMetadataResponse, or the request thread when called
        asynchronously.
    """
    # Fill in every framework option the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional arguments are forwarded to the endpoint as keywords.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['dict_id'] = dict_id
    return self.v1_dlp_dictionaries_dict_id_delete_endpoint.call_with_http_info(**kwargs)
def v1_dlp_dictionaries_dict_id_get(
    self,
    session_token,
    key_manager_token,
    dict_id,
    **kwargs
):
    """Get basic metadata for a dictionary.

    The request is synchronous by default; pass ``async_req=True`` to get
    back the request thread instead of the response model.

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        dict_id (str): Unique dictionary identifier.

    Keyword Args:
        dict_version (str): If a valid dictionary version number, return the
            metadata at that version; otherwise the latest. [optional]
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the payload, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the input uses the serialized
            (OpenAPI) names; False for pythonic snake_case names (default).
        _content_type (str/None): force the body content-type; when None it
            is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.

    Returns:
        V1DLPDictionaryMetadataResponse, or the request thread when called
        asynchronously.
    """
    # Fill in every framework option the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional arguments are forwarded to the endpoint as keywords.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['dict_id'] = dict_id
    return self.v1_dlp_dictionaries_dict_id_get_endpoint.call_with_http_info(**kwargs)
def v1_dlp_dictionaries_dict_id_put(
    self,
    session_token,
    key_manager_token,
    dict_id,
    body,
    **kwargs
):
    """Updates a dictionary  # noqa: E501

    Updates the dictionary's basic metadata without content. This API
    cannot be used for creating a new dictionary; only "name" can be
    changed. Note: all related policies will also have versions
    updated.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        dict_id (str): Unique dictionary identifier.
        body (V1DLPDictionaryMetadataUpdateRequest): Updated metadata.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPDictionaryMetadataResponse, or the request thread when
        called asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['dict_id'] = dict_id
    kwargs['body'] = body
    return self.v1_dlp_dictionaries_dict_id_put_endpoint.call_with_http_info(**kwargs)
def v1_dlp_dictionaries_get(
    self,
    session_token,
    key_manager_token,
    **kwargs
):
    """Get all dictionary metadatas  # noqa: E501

    Get all dictionary metadatas with the latest version. Each
    dictionary object will only contain meta data of the content.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.

    Keyword Args:
        page (int): Optional page number to return (default is 0).
            [optional]
        limit (int): Optional number of results per page; default is 50,
            maximum is 50. [optional]
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPDictionaryMetadataCollectionResponse, or the request thread
        when called asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    return self.v1_dlp_dictionaries_get_endpoint.call_with_http_info(**kwargs)
def v1_dlp_dictionaries_post(
    self,
    session_token,
    key_manager_token,
    body,
    **kwargs
):
    """Create a dictionary  # noqa: E501

    Creates a dictionary with basic metadata and no content. Only the
    "name" and "type" fields are used to create a new dictionary
    entry.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        body (V1DLPDictionaryMetadataCreateRequest): Dictionary metadata.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPDictionaryMetadataResponse, or the request thread when
        called asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['body'] = body
    return self.v1_dlp_dictionaries_post_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_get(
    self,
    session_token,
    key_manager_token,
    **kwargs
):
    """Get all policies  # noqa: E501

    Get all policies  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.

    Keyword Args:
        page (int): Optional page number to return (default is 0).
            [optional]
        limit (int): Optional number of results per page; default is 50,
            maximum is 50. [optional]
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPoliciesCollectionResponse, or the request thread when
        called asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    return self.v1_dlp_policies_get_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_policy_id_delete(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Delete a policy  # noqa: E501

    Delete a policy. Note: Only disabled policy can be deleted  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Unique dictionary identifier.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v1_dlp_policies_policy_id_delete_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_policy_id_disable_post(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Disables a policy.  # noqa: E501

    Disables a policy.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Policy identifier.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v1_dlp_policies_policy_id_disable_post_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_policy_id_enable_post(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Enables a policy.  # noqa: E501

    Enables a policy.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Policy identifier.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v1_dlp_policies_policy_id_enable_post_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_policy_id_get(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Get a policy  # noqa: E501

    Get a policy  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Unique dictionary identifier.

    Keyword Args:
        policy_version (str): Optional; when set to a valid policy version
            number, returns the policy at that version, otherwise the
            latest policy. [optional]
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v1_dlp_policies_policy_id_get_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_policy_id_put(
    self,
    session_token,
    key_manager_token,
    policy_id,
    body,
    **kwargs
):
    """Updates a policy. Cannot be used for creation.  # noqa: E501

    Update the policy (name, type, contentTypes, scopes) and also the
    dictionaries for a policy. Warning: sending an empty list of
    dictionaries deletes all dictionaries for this policy and
    automatically disables it. Note: the policy must already exist.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Unique dictionary identifier.
        body (V1DLPPolicyRequest): Updated policy details.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    kwargs['body'] = body
    return self.v1_dlp_policies_policy_id_put_endpoint.call_with_http_info(**kwargs)
def v1_dlp_policies_post(
    self,
    session_token,
    key_manager_token,
    body,
    **kwargs
):
    """Creates a policy  # noqa: E501

    Creates a new policy with dictionary references. At creation time the
    caller should only provide contentTypes, name, scopes and type; the
    rest is populated automatically. Note: the policy must be enabled
    after creation before it is enforced.  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        body (V1DLPPolicyRequest): Details about the policy to create.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V1DLPPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['body'] = body
    return self.v1_dlp_policies_post_endpoint.call_with_http_info(**kwargs)
def v3_dlp_policies_get(
    self,
    session_token,
    key_manager_token,
    **kwargs
):
    """Get all policies  # noqa: E501

    Get all policies  # noqa: E501

    Runs synchronously by default; pass ``async_req=True`` to get back
    the request thread instead (retrieve the result via ``thread.get()``).

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.

    Keyword Args:
        page (int): Optional page number to return (default is 0).
            [optional]
        limit (int): Optional number of results per page; default is 50,
            maximum is 50. [optional]
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case
            names (default).
        _content_type (str/None): force the body content-type; by default
            it is predicted from the allowed content-types and the body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V3DLPPoliciesCollectionResponse, or the request thread when
        called asynchronously.
    """
    # Fill in framework-level options, preserving caller overrides.
    for _option, _value in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_option, _value)
    # Positional arguments travel to the endpoint through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    return self.v3_dlp_policies_get_endpoint.call_with_http_info(**kwargs)
def v3_dlp_policies_policy_id_delete_post(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Delete a policy  # noqa: E501

    Delete a policy. Note: Only disabled policy can be deleted  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = agent_api.v3_dlp_policies_policy_id_delete_post(session_token, key_manager_token, policy_id, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Unique policy identifier.

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        V3DLPPolicyResponse
            If the method is called asynchronously, returns the request
            thread.
    """
    # Back-fill framework options only where the caller did not supply a
    # value; setdefault is the idiomatic form of `kwargs[k] = kwargs.get(k, d)`.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_spec_property_naming', False)
    kwargs.setdefault('_content_type', None)
    kwargs.setdefault('_host_index', None)
    # Positional arguments always win over anything passed through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v3_dlp_policies_policy_id_delete_post_endpoint.call_with_http_info(**kwargs)
def v3_dlp_policies_policy_id_disable_post(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Disables a policy.  # noqa: E501

    Disables a policy.  # noqa: E501
    Synchronous by default; pass async_req=True to get back a request
    thread instead (call thread.get() for the result).

    >>> thread = agent_api.v3_dlp_policies_policy_id_disable_post(session_token, key_manager_token, policy_id, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str):

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True when input uses serialized
            (OpenAPI) names; False (default) for pythonic snake-case names.
        _content_type (str/None): force body content-type; with None
            (default) it is predicted from allowed content-types and body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V3DLPPolicyResponse, or the request thread when async_req=True.
    """
    framework_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    # Keep caller-supplied values; back-fill everything else.
    for option, fallback in framework_defaults:
        kwargs[option] = kwargs.get(option, fallback)
    # Required parameters travel through kwargs to the endpoint.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v3_dlp_policies_policy_id_disable_post_endpoint.call_with_http_info(**kwargs)
def v3_dlp_policies_policy_id_enable_post(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Enables a policy.  # noqa: E501

    Enables a policy.  # noqa: E501
    Synchronous by default; pass async_req=True to get back a request
    thread instead (call thread.get() for the result).

    >>> thread = agent_api.v3_dlp_policies_policy_id_enable_post(session_token, key_manager_token, policy_id, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str):

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True when input uses serialized
            (OpenAPI) names; False (default) for pythonic snake-case names.
        _content_type (str/None): force body content-type; with None
            (default) it is predicted from allowed content-types and body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V3DLPPolicyResponse, or the request thread when async_req=True.
    """
    # Start from the framework defaults, then layer the caller's kwargs on
    # top so explicit values always win.
    call_args = {
        'async_req': False,
        '_return_http_data_only': True,
        '_preload_content': True,
        '_request_timeout': None,
        '_check_input_type': True,
        '_check_return_type': True,
        '_spec_property_naming': False,
        '_content_type': None,
        '_host_index': None,
    }
    call_args.update(kwargs)
    # Required parameters are forwarded to the endpoint via kwargs.
    call_args['session_token'] = session_token
    call_args['key_manager_token'] = key_manager_token
    call_args['policy_id'] = policy_id
    return self.v3_dlp_policies_policy_id_enable_post_endpoint.call_with_http_info(**call_args)
def v3_dlp_policies_policy_id_get(
    self,
    session_token,
    key_manager_token,
    policy_id,
    **kwargs
):
    """Get a policy  # noqa: E501

    Get a policy  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = agent_api.v3_dlp_policies_policy_id_get(session_token, key_manager_token, policy_id, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Unique policy identifier.

    Keyword Args:
        policy_version (str): Optional parameter, if set to be valid policy version number, will return policy with specified policyVersion. Otherwise, return the latest policy. . [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        V3DLPPolicyResponse
            If the method is called asynchronously, returns the request
            thread.
    """
    # Back-fill framework options only where the caller did not supply a
    # value; setdefault is the idiomatic form of `kwargs[k] = kwargs.get(k, d)`.
    # Optional API parameters (e.g. policy_version) simply ride along in
    # kwargs untouched.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_spec_property_naming', False)
    kwargs.setdefault('_content_type', None)
    kwargs.setdefault('_host_index', None)
    # Positional arguments always win over anything passed through kwargs.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    return self.v3_dlp_policies_policy_id_get_endpoint.call_with_http_info(**kwargs)
def v3_dlp_policies_policy_id_update_post(
    self,
    session_token,
    key_manager_token,
    policy_id,
    body,
    **kwargs
):
    """Updates a policy. Cannot be used for creation.  # noqa: E501

    Update the policy (name, type, contentTypes, scopes) and also the dictionaries for a policy. Warning: If you send empty list of dictionaries during the update operation, then all the dictionaries for this policy are deleted and policy is automatically disabled. Note: The policy should already exist.  # noqa: E501
    Synchronous by default; pass async_req=True to get back a request
    thread instead (call thread.get() for the result).

    >>> thread = agent_api.v3_dlp_policies_policy_id_update_post(session_token, key_manager_token, policy_id, body, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        policy_id (str): Unique policy identifier.
        body (V3DLPPolicyRequest):

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True when input uses serialized
            (OpenAPI) names; False (default) for pythonic snake-case names.
        _content_type (str/None): force body content-type; with None
            (default) it is predicted from allowed content-types and body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V3DLPPolicyResponse, or the request thread when async_req=True.
    """
    framework_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    # Keep caller-supplied values; back-fill everything else.
    for option, fallback in framework_defaults:
        kwargs[option] = kwargs.get(option, fallback)
    # Required parameters travel through kwargs to the endpoint.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['policy_id'] = policy_id
    kwargs['body'] = body
    return self.v3_dlp_policies_policy_id_update_post_endpoint.call_with_http_info(**kwargs)
def v3_dlp_policies_post(
    self,
    session_token,
    key_manager_token,
    body,
    **kwargs
):
    """Creates a policy  # noqa: E501

    Creates a new policy with dictionary references. At the time of policy creation, the caller should only provide - contentTypes, name, scopes and type. The rest of the information is populated automatically. Note - You need to enable the policy after creation to start enforcing the policy.  # noqa: E501
    Synchronous by default; pass async_req=True to get back a request
    thread instead (call thread.get() for the result).

    >>> thread = agent_api.v3_dlp_policies_post(session_token, key_manager_token, body, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        body (V3DLPPolicyRequest): Details about the policy that should be created.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default True.
        _request_timeout (int/float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True when input uses serialized
            (OpenAPI) names; False (default) for pythonic snake-case names.
        _content_type (str/None): force body content-type; with None
            (default) it is predicted from allowed content-types and body.
        _host_index (int/None): index of the server to use; default is
            read from the configuration.

    Returns:
        V3DLPPolicyResponse, or the request thread when async_req=True.
    """
    # Start from the framework defaults, then layer the caller's kwargs on
    # top so explicit values always win.
    call_args = {
        'async_req': False,
        '_return_http_data_only': True,
        '_preload_content': True,
        '_request_timeout': None,
        '_check_input_type': True,
        '_check_return_type': True,
        '_spec_property_naming': False,
        '_content_type': None,
        '_host_index': None,
    }
    call_args.update(kwargs)
    # Required parameters are forwarded to the endpoint via kwargs.
    call_args['session_token'] = session_token
    call_args['key_manager_token'] = key_manager_token
    call_args['body'] = body
    return self.v3_dlp_policies_post_endpoint.call_with_http_info(**call_args)
| 39.209085
| 1,445
| 0.514022
| 11,957
| 126,018
| 5.141591
| 0.031864
| 0.037737
| 0.051238
| 0.04099
| 0.948274
| 0.94616
| 0.943899
| 0.940694
| 0.923696
| 0.918068
| 0
| 0.005198
| 0.404601
| 126,018
| 3,213
| 1,446
| 39.221289
| 0.81417
| 0.378176
| 0
| 0.75069
| 0
| 0
| 0.249747
| 0.046781
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01012
| false
| 0
| 0.0069
| 0
| 0.027139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
908518ae9c5b7a009d733db77a77bedaa867fc9f
| 99,814
|
py
|
Python
|
msgraph/cli/command_modules/groups/azext_groups/generated/commands.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | null | null | null |
msgraph/cli/command_modules/groups/azext_groups/generated/commands.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | 22
|
2022-03-29T22:54:37.000Z
|
2022-03-29T22:55:27.000Z
|
msgraph/cli/command_modules/groups/azext_groups/generated/commands.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals
# pylint: disable=bad-continuation
# pylint: disable=line-too-long
from azure.cli.core.commands import CliCommandType
from azext_groups.generated._client_factory import (
cf_group_lifecycle_policy_group_lifecycle_policy,
cf_group_lifecycle_policy,
cf_group_group,
cf_group,
cf_group_calendar_calendar_view_attachment,
cf_group_calendar_calendar_view_calendar,
cf_group_calendar_calendar_view_instance,
cf_group_calendar_calendar_view,
cf_group_calendar_event_attachment,
cf_group_calendar_event_calendar,
cf_group_calendar_event_instance,
cf_group_calendar_event,
cf_group_calendar,
cf_group_calendar_view_attachment,
cf_group_calendar_view_calendar_calendar_view,
cf_group_calendar_view_calendar_event,
cf_group_calendar_view_calendar,
cf_group_calendar_view_instance,
cf_group_calendar_view,
cf_group_conversation,
cf_group_conversation_thread,
cf_group_conversation_thread_post,
cf_group_conversation_thread_post_attachment,
cf_group_conversation_thread_post_in_reply_to,
cf_group_event_attachment,
cf_group_event_calendar_calendar_view,
cf_group_event_calendar_event,
cf_group_event_calendar,
cf_group_event_instance,
cf_group_event,
cf_group_onenote_notebook,
cf_group_onenote_notebook_section_group_parent_notebook,
cf_group_onenote_notebook_section_group_section,
cf_group_onenote_notebook_section_group_section_page,
cf_group_onenote_notebook_section_group_section_page_parent_notebook,
cf_group_onenote_notebook_section_group_section_page_parent_section,
cf_group_onenote_notebook_section_group_section_parent_notebook,
cf_group_onenote_notebook_section,
cf_group_onenote_notebook_section_page,
cf_group_onenote_notebook_section_page_parent_notebook,
cf_group_onenote_notebook_section_page_parent_section,
cf_group_onenote_notebook_section_parent_notebook,
cf_group_onenote_notebook_section_parent_section_group_parent_notebook,
cf_group_onenote_notebook_section_parent_section_group_section,
cf_group_onenote_page,
cf_group_onenote_page_parent_notebook,
cf_group_onenote_page_parent_notebook_section_group_parent_notebook,
cf_group_onenote_page_parent_notebook_section_group_section,
cf_group_onenote_page_parent_notebook_section_group_section_page,
cf_group_onenote_page_parent_notebook_section_group_section_parent_notebook,
cf_group_onenote_page_parent_notebook_section,
cf_group_onenote_page_parent_notebook_section_page,
cf_group_onenote_page_parent_notebook_section_parent_notebook,
cf_group_onenote_page_parent_notebook_section_parent_section_group_parent_notebook,
cf_group_onenote_page_parent_notebook_section_parent_section_group_section,
cf_group_onenote_page_parent_section,
cf_group_onenote_page_parent_section_page,
cf_group_onenote_page_parent_section_parent_notebook,
cf_group_onenote_page_parent_section_parent_notebook_section_group_parent_notebook,
cf_group_onenote_page_parent_section_parent_notebook_section_group_section,
cf_group_onenote_page_parent_section_parent_notebook_section,
cf_group_onenote_page_parent_section_parent_section_group_parent_notebook,
cf_group_onenote_page_parent_section_parent_section_group_parent_notebook_section,
cf_group_onenote_page_parent_section_parent_section_group_section,
cf_group_onenote_section_group_parent_notebook,
cf_group_onenote_section_group_parent_notebook_section,
cf_group_onenote_section_group_parent_notebook_section_page,
cf_group_onenote_section_group_parent_notebook_section_page_parent_notebook,
cf_group_onenote_section_group_parent_notebook_section_page_parent_section,
cf_group_onenote_section_group_parent_notebook_section_parent_notebook,
cf_group_onenote_section_group_section,
cf_group_onenote_section_group_section_page,
cf_group_onenote_section_group_section_page_parent_notebook,
cf_group_onenote_section_group_section_page_parent_notebook_section,
cf_group_onenote_section_group_section_page_parent_section,
cf_group_onenote_section_group_section_parent_notebook,
cf_group_onenote_section_group_section_parent_notebook_section,
cf_group_onenote_section,
cf_group_onenote_section_page,
cf_group_onenote_section_page_parent_notebook,
cf_group_onenote_section_page_parent_notebook_section_group_parent_notebook,
cf_group_onenote_section_page_parent_notebook_section_group_section,
cf_group_onenote_section_page_parent_notebook_section,
cf_group_onenote_section_page_parent_section,
cf_group_onenote_section_parent_notebook,
cf_group_onenote_section_parent_notebook_section_group_parent_notebook,
cf_group_onenote_section_parent_notebook_section_group_section,
cf_group_onenote_section_parent_notebook_section,
cf_group_onenote_section_parent_section_group_parent_notebook,
cf_group_onenote_section_parent_section_group_parent_notebook_section,
cf_group_onenote_section_parent_section_group_section,
cf_group_thread,
cf_group_thread_post,
cf_group_thread_post_attachment,
cf_group_thread_post_in_reply_to,
)
# ---------------------------------------------------------------------------
# Command-type registrations for the `groups` command module.
#
# Each CliCommandType binds an `operations_tmpl` — a dotted path into the
# vendored Microsoft Graph SDK naming the operations class whose methods back
# the CLI commands (the trailing `{}` is filled in with the method name) — to
# the matching `client_factory` imported above.  This file is generated by
# AutoRest (see the header notice), so manual edits are lost on regeneration.
# ---------------------------------------------------------------------------
groups_group = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_operations#GroupsOperations.{}',
    client_factory=cf_group,
)
# --- group calendar / calendar-view / event operations ---
groups_group_calendar = CliCommandType(
    operations_tmpl=(
        'azext_groups.vendored_sdks.groups.operations._groups_calendar_operations#GroupsCalendarOperations.{}'
    ),
    client_factory=cf_group_calendar,
)
groups_group_calendar_calendar_view = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_calendar_view_operations#GroupsCalendarCalendarViewOperations.{}',
    client_factory=cf_group_calendar_calendar_view,
)
groups_group_calendar_calendar_view_attachment = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_calendar_view_attachments_operations#GroupsCalendarCalendarViewAttachmentsOperations.{}',
    client_factory=cf_group_calendar_calendar_view_attachment,
)
groups_group_calendar_calendar_view_calendar = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_calendar_view_calendar_operations#GroupsCalendarCalendarViewCalendarOperations.{}',
    client_factory=cf_group_calendar_calendar_view_calendar,
)
groups_group_calendar_calendar_view_instance = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_calendar_view_instances_operations#GroupsCalendarCalendarViewInstancesOperations.{}',
    client_factory=cf_group_calendar_calendar_view_instance,
)
groups_group_calendar_event = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_events_operations#GroupsCalendarEventsOperations.{}',
    client_factory=cf_group_calendar_event,
)
groups_group_calendar_event_attachment = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_events_attachments_operations#GroupsCalendarEventsAttachmentsOperations.{}',
    client_factory=cf_group_calendar_event_attachment,
)
groups_group_calendar_event_calendar = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_events_calendar_operations#GroupsCalendarEventsCalendarOperations.{}',
    client_factory=cf_group_calendar_event_calendar,
)
groups_group_calendar_event_instance = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_events_instances_operations#GroupsCalendarEventsInstancesOperations.{}',
    client_factory=cf_group_calendar_event_instance,
)
groups_group_calendar_view = CliCommandType(
    operations_tmpl=(
        'azext_groups.vendored_sdks.groups.operations._groups_calendar_view_operations#GroupsCalendarViewOperations.{}'
    ),
    client_factory=cf_group_calendar_view,
)
groups_group_calendar_view_attachment = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_view_attachments_operations#GroupsCalendarViewAttachmentsOperations.{}',
    client_factory=cf_group_calendar_view_attachment,
)
groups_group_calendar_view_calendar = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_view_calendar_operations#GroupsCalendarViewCalendarOperations.{}',
    client_factory=cf_group_calendar_view_calendar,
)
groups_group_calendar_view_calendar_calendar_view = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_view_calendar_calendar_view_operations#GroupsCalendarViewCalendarCalendarViewOperations.{}',
    client_factory=cf_group_calendar_view_calendar_calendar_view,
)
groups_group_calendar_view_calendar_event = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_view_calendar_events_operations#GroupsCalendarViewCalendarEventsOperations.{}',
    client_factory=cf_group_calendar_view_calendar_event,
)
groups_group_calendar_view_instance = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_calendar_view_instances_operations#GroupsCalendarViewInstancesOperations.{}',
    client_factory=cf_group_calendar_view_instance,
)
# --- group conversation / thread / post operations ---
groups_group_conversation = CliCommandType(
    operations_tmpl=(
        'azext_groups.vendored_sdks.groups.operations._groups_conversations_operations#GroupsConversationsOperations.{}'
    ),
    client_factory=cf_group_conversation,
)
groups_group_conversation_thread = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_conversations_threads_operations#GroupsConversationsThreadsOperations.{}',
    client_factory=cf_group_conversation_thread,
)
groups_group_conversation_thread_post = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_conversations_threads_posts_operations#GroupsConversationsThreadsPostsOperations.{}',
    client_factory=cf_group_conversation_thread_post,
)
groups_group_conversation_thread_post_attachment = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_conversations_threads_posts_attachments_operations#GroupsConversationsThreadsPostsAttachmentsOperations.{}',
    client_factory=cf_group_conversation_thread_post_attachment,
)
groups_group_conversation_thread_post_in_reply_to = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_conversations_threads_posts_in_reply_to_operations#GroupsConversationsThreadsPostsInReplyToOperations.{}',
    client_factory=cf_group_conversation_thread_post_in_reply_to,
)
# --- top-level group event operations ---
groups_group_event = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_events_operations#GroupsEventsOperations.{}',
    client_factory=cf_group_event,
)
groups_group_event_attachment = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_events_attachments_operations#GroupsEventsAttachmentsOperations.{}',
    client_factory=cf_group_event_attachment,
)
groups_group_event_calendar = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_events_calendar_operations#GroupsEventsCalendarOperations.{}',
    client_factory=cf_group_event_calendar,
)
groups_group_event_calendar_calendar_view = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_events_calendar_calendar_view_operations#GroupsEventsCalendarCalendarViewOperations.{}',
    client_factory=cf_group_event_calendar_calendar_view,
)
groups_group_event_calendar_event = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_events_calendar_events_operations#GroupsEventsCalendarEventsOperations.{}',
    client_factory=cf_group_event_calendar_event,
)
groups_group_event_instance = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_events_instances_operations#GroupsEventsInstancesOperations.{}',
    client_factory=cf_group_event_instance,
)
# --- group entity and lifecycle-policy operations ---
groups_group_group = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_group_operations#GroupsGroupOperations.{}',
    client_factory=cf_group_group,
)
groups_group_lifecycle_policy = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._group_lifecycle_policies_operations#GroupLifecyclePoliciesOperations.{}',
    client_factory=cf_group_lifecycle_policy,
)
groups_group_lifecycle_policy_group_lifecycle_policy = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._group_lifecycle_policies_group_lifecycle_policy_operations#GroupLifecyclePoliciesGroupLifecyclePolicyOperations.{}',
    client_factory=cf_group_lifecycle_policy_group_lifecycle_policy,
)
# --- group OneNote notebook / section / page operations ---
groups_group_onenote_notebook = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_operations#GroupsOnenoteNotebooksOperations.{}',
    client_factory=cf_group_onenote_notebook,
)
groups_group_onenote_notebook_section = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_operations#GroupsOnenoteNotebooksSectionsOperations.{}',
    client_factory=cf_group_onenote_notebook_section,
)
groups_group_onenote_notebook_section_group_parent_notebook = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_section_groups_parent_notebook_operations#GroupsOnenoteNotebooksSectionGroupsParentNotebookOperations.{}',
    client_factory=cf_group_onenote_notebook_section_group_parent_notebook,
)
groups_group_onenote_notebook_section_group_section = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_section_groups_sections_operations#GroupsOnenoteNotebooksSectionGroupsSectionsOperations.{}',
    client_factory=cf_group_onenote_notebook_section_group_section,
)
groups_group_onenote_notebook_section_group_section_page = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_section_groups_sections_pages_operations#GroupsOnenoteNotebooksSectionGroupsSectionsPagesOperations.{}',
    client_factory=cf_group_onenote_notebook_section_group_section_page,
)
groups_group_onenote_notebook_section_group_section_page_parent_notebook = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_section_groups_sections_pages_parent_notebook_operations#GroupsOnenoteNotebooksSectionGroupsSectionsPagesParentNotebookOperations.{}',
    client_factory=cf_group_onenote_notebook_section_group_section_page_parent_notebook,
)
groups_group_onenote_notebook_section_group_section_page_parent_section = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_section_groups_sections_pages_parent_section_operations#GroupsOnenoteNotebooksSectionGroupsSectionsPagesParentSectionOperations.{}',
    client_factory=cf_group_onenote_notebook_section_group_section_page_parent_section,
)
groups_group_onenote_notebook_section_group_section_parent_notebook = CliCommandType(
    operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_section_groups_sections_parent_notebook_operations#GroupsOnenoteNotebooksSectionGroupsSectionsParentNotebookOperations.{}',
    client_factory=cf_group_onenote_notebook_section_group_section_parent_notebook,
)
groups_group_onenote_notebook_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_pages_operations#GroupsOnenoteNotebooksSectionsPagesOperations.{}',
client_factory=cf_group_onenote_notebook_section_page,
)
groups_group_onenote_notebook_section_page_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_pages_parent_notebook_operations#GroupsOnenoteNotebooksSectionsPagesParentNotebookOperations.{}',
client_factory=cf_group_onenote_notebook_section_page_parent_notebook,
)
groups_group_onenote_notebook_section_page_parent_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_pages_parent_section_operations#GroupsOnenoteNotebooksSectionsPagesParentSectionOperations.{}',
client_factory=cf_group_onenote_notebook_section_page_parent_section,
)
groups_group_onenote_notebook_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_parent_notebook_operations#GroupsOnenoteNotebooksSectionsParentNotebookOperations.{}',
client_factory=cf_group_onenote_notebook_section_parent_notebook,
)
groups_group_onenote_notebook_section_parent_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_parent_section_group_parent_notebook_operations#GroupsOnenoteNotebooksSectionsParentSectionGroupParentNotebookOperations.{}',
client_factory=cf_group_onenote_notebook_section_parent_section_group_parent_notebook,
)
groups_group_onenote_notebook_section_parent_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_notebooks_sections_parent_section_group_sections_operations#GroupsOnenoteNotebooksSectionsParentSectionGroupSectionsOperations.{}',
client_factory=cf_group_onenote_notebook_section_parent_section_group_section,
)
groups_group_onenote_page = CliCommandType(
operations_tmpl=(
'azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_operations#GroupsOnenotePagesOperations.{}'
),
client_factory=cf_group_onenote_page,
)
groups_group_onenote_page_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_operations#GroupsOnenotePagesParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook,
)
groups_group_onenote_page_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_sections_operations#GroupsOnenotePagesParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section,
)
groups_group_onenote_page_parent_notebook_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_section_groups_parent_notebook_operations#GroupsOnenotePagesParentNotebookSectionGroupsParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_group_parent_notebook,
)
groups_group_onenote_page_parent_notebook_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_section_groups_sections_operations#GroupsOnenotePagesParentNotebookSectionGroupsSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_group_section,
)
groups_group_onenote_page_parent_notebook_section_group_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_section_groups_sections_pages_operations#GroupsOnenotePagesParentNotebookSectionGroupsSectionsPagesOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_group_section_page,
)
groups_group_onenote_page_parent_notebook_section_group_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_section_groups_sections_parent_notebook_operations#GroupsOnenotePagesParentNotebookSectionGroupsSectionsParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_group_section_parent_notebook,
)
groups_group_onenote_page_parent_notebook_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_sections_pages_operations#GroupsOnenotePagesParentNotebookSectionsPagesOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_page,
)
groups_group_onenote_page_parent_notebook_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_sections_parent_notebook_operations#GroupsOnenotePagesParentNotebookSectionsParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_parent_notebook,
)
groups_group_onenote_page_parent_notebook_section_parent_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_sections_parent_section_group_parent_notebook_operations#GroupsOnenotePagesParentNotebookSectionsParentSectionGroupParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_parent_section_group_parent_notebook,
)
groups_group_onenote_page_parent_notebook_section_parent_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_notebook_sections_parent_section_group_sections_operations#GroupsOnenotePagesParentNotebookSectionsParentSectionGroupSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_notebook_section_parent_section_group_section,
)
groups_group_onenote_page_parent_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_operations#GroupsOnenotePagesParentSectionOperations.{}',
client_factory=cf_group_onenote_page_parent_section,
)
groups_group_onenote_page_parent_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_pages_operations#GroupsOnenotePagesParentSectionPagesOperations.{}',
client_factory=cf_group_onenote_page_parent_section_page,
)
groups_group_onenote_page_parent_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_notebook_operations#GroupsOnenotePagesParentSectionParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_notebook,
)
groups_group_onenote_page_parent_section_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_notebook_sections_operations#GroupsOnenotePagesParentSectionParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_notebook_section,
)
groups_group_onenote_page_parent_section_parent_notebook_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_notebook_section_groups_parent_notebook_operations#GroupsOnenotePagesParentSectionParentNotebookSectionGroupsParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_notebook_section_group_parent_notebook,
)
groups_group_onenote_page_parent_section_parent_notebook_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_notebook_section_groups_sections_operations#GroupsOnenotePagesParentSectionParentNotebookSectionGroupsSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_notebook_section_group_section,
)
groups_group_onenote_page_parent_section_parent_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_section_group_parent_notebook_operations#GroupsOnenotePagesParentSectionParentSectionGroupParentNotebookOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_section_group_parent_notebook,
)
groups_group_onenote_page_parent_section_parent_section_group_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_section_group_parent_notebook_sections_operations#GroupsOnenotePagesParentSectionParentSectionGroupParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_section_group_parent_notebook_section,
)
groups_group_onenote_page_parent_section_parent_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_pages_parent_section_parent_section_group_sections_operations#GroupsOnenotePagesParentSectionParentSectionGroupSectionsOperations.{}',
client_factory=cf_group_onenote_page_parent_section_parent_section_group_section,
)
groups_group_onenote_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_operations#GroupsOnenoteSectionsOperations.{}',
client_factory=cf_group_onenote_section,
)
groups_group_onenote_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_parent_notebook_operations#GroupsOnenoteSectionGroupsParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_group_parent_notebook,
)
groups_group_onenote_section_group_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_parent_notebook_sections_operations#GroupsOnenoteSectionGroupsParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_section_group_parent_notebook_section,
)
groups_group_onenote_section_group_parent_notebook_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_parent_notebook_sections_pages_operations#GroupsOnenoteSectionGroupsParentNotebookSectionsPagesOperations.{}',
client_factory=cf_group_onenote_section_group_parent_notebook_section_page,
)
groups_group_onenote_section_group_parent_notebook_section_page_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_parent_notebook_sections_pages_parent_notebook_operations#GroupsOnenoteSectionGroupsParentNotebookSectionsPagesParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_group_parent_notebook_section_page_parent_notebook,
)
groups_group_onenote_section_group_parent_notebook_section_page_parent_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_parent_notebook_sections_pages_parent_section_operations#GroupsOnenoteSectionGroupsParentNotebookSectionsPagesParentSectionOperations.{}',
client_factory=cf_group_onenote_section_group_parent_notebook_section_page_parent_section,
)
groups_group_onenote_section_group_parent_notebook_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_parent_notebook_sections_parent_notebook_operations#GroupsOnenoteSectionGroupsParentNotebookSectionsParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_group_parent_notebook_section_parent_notebook,
)
groups_group_onenote_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_operations#GroupsOnenoteSectionGroupsSectionsOperations.{}',
client_factory=cf_group_onenote_section_group_section,
)
groups_group_onenote_section_group_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_pages_operations#GroupsOnenoteSectionGroupsSectionsPagesOperations.{}',
client_factory=cf_group_onenote_section_group_section_page,
)
groups_group_onenote_section_group_section_page_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_pages_parent_notebook_operations#GroupsOnenoteSectionGroupsSectionsPagesParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_group_section_page_parent_notebook,
)
groups_group_onenote_section_group_section_page_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_pages_parent_notebook_sections_operations#GroupsOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_section_group_section_page_parent_notebook_section,
)
groups_group_onenote_section_group_section_page_parent_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_pages_parent_section_operations#GroupsOnenoteSectionGroupsSectionsPagesParentSectionOperations.{}',
client_factory=cf_group_onenote_section_group_section_page_parent_section,
)
groups_group_onenote_section_group_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_parent_notebook_operations#GroupsOnenoteSectionGroupsSectionsParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_group_section_parent_notebook,
)
groups_group_onenote_section_group_section_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_section_groups_sections_parent_notebook_sections_operations#GroupsOnenoteSectionGroupsSectionsParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_section_group_section_parent_notebook_section,
)
groups_group_onenote_section_page = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_pages_operations#GroupsOnenoteSectionsPagesOperations.{}',
client_factory=cf_group_onenote_section_page,
)
groups_group_onenote_section_page_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_pages_parent_notebook_operations#GroupsOnenoteSectionsPagesParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_page_parent_notebook,
)
groups_group_onenote_section_page_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_pages_parent_notebook_sections_operations#GroupsOnenoteSectionsPagesParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_section_page_parent_notebook_section,
)
groups_group_onenote_section_page_parent_notebook_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_pages_parent_notebook_section_groups_parent_notebook_operations#GroupsOnenoteSectionsPagesParentNotebookSectionGroupsParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_page_parent_notebook_section_group_parent_notebook,
)
groups_group_onenote_section_page_parent_notebook_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_pages_parent_notebook_section_groups_sections_operations#GroupsOnenoteSectionsPagesParentNotebookSectionGroupsSectionsOperations.{}',
client_factory=cf_group_onenote_section_page_parent_notebook_section_group_section,
)
groups_group_onenote_section_page_parent_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_pages_parent_section_operations#GroupsOnenoteSectionsPagesParentSectionOperations.{}',
client_factory=cf_group_onenote_section_page_parent_section,
)
groups_group_onenote_section_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_notebook_operations#GroupsOnenoteSectionsParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_parent_notebook,
)
groups_group_onenote_section_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_notebook_sections_operations#GroupsOnenoteSectionsParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_section_parent_notebook_section,
)
groups_group_onenote_section_parent_notebook_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_notebook_section_groups_parent_notebook_operations#GroupsOnenoteSectionsParentNotebookSectionGroupsParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_parent_notebook_section_group_parent_notebook,
)
groups_group_onenote_section_parent_notebook_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_notebook_section_groups_sections_operations#GroupsOnenoteSectionsParentNotebookSectionGroupsSectionsOperations.{}',
client_factory=cf_group_onenote_section_parent_notebook_section_group_section,
)
groups_group_onenote_section_parent_section_group_parent_notebook = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_section_group_parent_notebook_operations#GroupsOnenoteSectionsParentSectionGroupParentNotebookOperations.{}',
client_factory=cf_group_onenote_section_parent_section_group_parent_notebook,
)
groups_group_onenote_section_parent_section_group_parent_notebook_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_section_group_parent_notebook_sections_operations#GroupsOnenoteSectionsParentSectionGroupParentNotebookSectionsOperations.{}',
client_factory=cf_group_onenote_section_parent_section_group_parent_notebook_section,
)
groups_group_onenote_section_parent_section_group_section = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_onenote_sections_parent_section_group_sections_operations#GroupsOnenoteSectionsParentSectionGroupSectionsOperations.{}',
client_factory=cf_group_onenote_section_parent_section_group_section,
)
groups_group_thread = CliCommandType(
operations_tmpl=(
'azext_groups.vendored_sdks.groups.operations._groups_threads_operations#GroupsThreadsOperations.{}'
),
client_factory=cf_group_thread,
)
groups_group_thread_post = CliCommandType(
operations_tmpl=(
'azext_groups.vendored_sdks.groups.operations._groups_threads_posts_operations#GroupsThreadsPostsOperations.{}'
),
client_factory=cf_group_thread_post,
)
groups_group_thread_post_attachment = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_threads_posts_attachments_operations#GroupsThreadsPostsAttachmentsOperations.{}',
client_factory=cf_group_thread_post_attachment,
)
groups_group_thread_post_in_reply_to = CliCommandType(
operations_tmpl='azext_groups.vendored_sdks.groups.operations._groups_threads_posts_in_reply_to_operations#GroupsThreadsPostsInReplyToOperations.{}',
client_factory=cf_group_thread_post_in_reply_to,
)
def load_command_table(self, _):
with self.command_group('groups group', groups_group, client_factory=cf_group) as g:
g.custom_command('add-favorite', 'groups_group_add_favorite')
g.custom_command('assign-license', 'groups_group_assign_license')
g.custom_command('check-granted-permission-for-app', 'groups_group_check_granted_permission_for_app')
g.custom_command('check-member-group', 'groups_group_check_member_group')
g.custom_command('check-member-object', 'groups_group_check_member_object')
g.custom_command('create-conversation', 'groups_group_create_conversation')
g.custom_command('create-extension', 'groups_group_create_extension')
g.custom_command('create-permission-grant', 'groups_group_create_permission_grant')
g.custom_command('create-photo', 'groups_group_create_photo')
g.custom_command('create-ref-accepted-sender', 'groups_group_create_ref_accepted_sender')
g.custom_command('create-ref-member', 'groups_group_create_ref_member')
g.custom_command('create-ref-member-of', 'groups_group_create_ref_member_of')
g.custom_command('create-ref-member-with-license-error', 'groups_group_create_ref_member_with_license_error')
g.custom_command('create-ref-owner', 'groups_group_create_ref_owner')
g.custom_command('create-ref-rejected-sender', 'groups_group_create_ref_rejected_sender')
g.custom_command('create-ref-transitive-member', 'groups_group_create_ref_transitive_member')
g.custom_command('create-ref-transitive-member-of', 'groups_group_create_ref_transitive_member_of')
g.custom_command('create-thread', 'groups_group_create_thread')
g.custom_command('delete-conversation', 'groups_group_delete_conversation')
g.custom_command('delete-extension', 'groups_group_delete_extension')
g.custom_command('delete-permission-grant', 'groups_group_delete_permission_grant')
g.custom_command('delete-photo', 'groups_group_delete_photo')
g.custom_command('delete-ref-created-on-behalf-of', 'groups_group_delete_ref_created_on_behalf_of')
g.custom_command('delete-thread', 'groups_group_delete_thread')
g.custom_command('delta', 'groups_group_delta')
g.custom_command('get-available-extension-property', 'groups_group_get_available_extension_property')
g.custom_command('get-by-id', 'groups_group_get_by_id')
g.custom_command('get-member-group', 'groups_group_get_member_group')
g.custom_command('get-member-object', 'groups_group_get_member_object')
g.custom_command('list-accepted-sender', 'groups_group_list_accepted_sender')
g.custom_command('list-conversation', 'groups_group_list_conversation')
g.custom_command('list-extension', 'groups_group_list_extension')
g.custom_command('list-member', 'groups_group_list_member')
g.custom_command('list-member-of', 'groups_group_list_member_of')
g.custom_command('list-member-with-license-error', 'groups_group_list_member_with_license_error')
g.custom_command('list-owner', 'groups_group_list_owner')
g.custom_command('list-permission-grant', 'groups_group_list_permission_grant')
g.custom_command('list-photo', 'groups_group_list_photo')
g.custom_command('list-ref-accepted-sender', 'groups_group_list_ref_accepted_sender')
g.custom_command('list-ref-member', 'groups_group_list_ref_member')
g.custom_command('list-ref-member-of', 'groups_group_list_ref_member_of')
g.custom_command('list-ref-member-with-license-error', 'groups_group_list_ref_member_with_license_error')
g.custom_command('list-ref-owner', 'groups_group_list_ref_owner')
g.custom_command('list-ref-rejected-sender', 'groups_group_list_ref_rejected_sender')
g.custom_command('list-ref-transitive-member', 'groups_group_list_ref_transitive_member')
g.custom_command('list-ref-transitive-member-of', 'groups_group_list_ref_transitive_member_of')
g.custom_command('list-rejected-sender', 'groups_group_list_rejected_sender')
g.custom_command('list-thread', 'groups_group_list_thread')
g.custom_command('list-transitive-member', 'groups_group_list_transitive_member')
g.custom_command('list-transitive-member-of', 'groups_group_list_transitive_member_of')
g.custom_command('remove-favorite', 'groups_group_remove_favorite')
g.custom_command('renew', 'groups_group_renew')
g.custom_command('reset-unseen-count', 'groups_group_reset_unseen_count')
g.custom_command('restore', 'groups_group_restore')
g.custom_command('set-photo-content', 'groups_group_set_photo_content')
g.custom_command('set-ref-created-on-behalf-of', 'groups_group_set_ref_created_on_behalf_of')
g.custom_command('show-conversation', 'groups_group_show_conversation')
g.custom_command('show-created-on-behalf-of', 'groups_group_show_created_on_behalf_of')
g.custom_command('show-extension', 'groups_group_show_extension')
g.custom_command('show-permission-grant', 'groups_group_show_permission_grant')
g.custom_command('show-photo', 'groups_group_show_photo')
g.custom_command('show-photo-content', 'groups_group_show_photo_content')
g.custom_command('show-ref-created-on-behalf-of', 'groups_group_show_ref_created_on_behalf_of')
g.custom_command('show-thread', 'groups_group_show_thread')
g.custom_command('subscribe-by-mail', 'groups_group_subscribe_by_mail')
g.custom_command('unsubscribe-by-mail', 'groups_group_unsubscribe_by_mail')
g.custom_command('update-conversation', 'groups_group_update_conversation')
g.custom_command('update-extension', 'groups_group_update_extension')
g.custom_command('update-permission-grant', 'groups_group_update_permission_grant')
g.custom_command('update-photo', 'groups_group_update_photo')
g.custom_command('update-thread', 'groups_group_update_thread')
g.custom_command('validate-property', 'groups_group_validate_property')
with self.command_group('groups group-calendar', groups_group_calendar, client_factory=cf_group_calendar) as g:
g.custom_command('allowed-calendar-sharing-role', 'groups_group_calendar_allowed_calendar_sharing_role')
g.custom_command('get-schedule', 'groups_group_calendar_get_schedule')
with self.command_group(
'groups group-calendar-calendar-view',
groups_group_calendar_calendar_view,
client_factory=cf_group_calendar_calendar_view,
) as g:
g.custom_command('accept', 'groups_group_calendar_calendar_view_accept')
g.custom_command('cancel', 'groups_group_calendar_calendar_view_cancel')
g.custom_command('decline', 'groups_group_calendar_calendar_view_decline')
g.custom_command('delta', 'groups_group_calendar_calendar_view_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_calendar_view_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_calendar_view_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_calendar_view_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_calendar_view_tentatively_accept')
with self.command_group(
'groups group-calendar-calendar-view-attachment',
groups_group_calendar_calendar_view_attachment,
client_factory=cf_group_calendar_calendar_view_attachment,
) as g:
g.custom_command(
'create-upload-session', 'groups_group_calendar_calendar_view_attachment_create_upload_session'
)
with self.command_group(
'groups group-calendar-calendar-view-calendar',
groups_group_calendar_calendar_view_calendar,
client_factory=cf_group_calendar_calendar_view_calendar,
) as g:
g.custom_command(
'allowed-calendar-sharing-role',
'groups_group_calendar_calendar_view_calendar_allowed_calendar_sharing_role',
)
g.custom_command('get-schedule', 'groups_group_calendar_calendar_view_calendar_get_schedule')
with self.command_group(
'groups group-calendar-calendar-view-instance',
groups_group_calendar_calendar_view_instance,
client_factory=cf_group_calendar_calendar_view_instance,
) as g:
g.custom_command('accept', 'groups_group_calendar_calendar_view_instance_accept')
g.custom_command('cancel', 'groups_group_calendar_calendar_view_instance_cancel')
g.custom_command('decline', 'groups_group_calendar_calendar_view_instance_decline')
g.custom_command('delta', 'groups_group_calendar_calendar_view_instance_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_calendar_view_instance_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_calendar_view_instance_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_calendar_view_instance_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_calendar_view_instance_tentatively_accept')
with self.command_group(
'groups group-calendar-event', groups_group_calendar_event, client_factory=cf_group_calendar_event
) as g:
g.custom_command('accept', 'groups_group_calendar_event_accept')
g.custom_command('cancel', 'groups_group_calendar_event_cancel')
g.custom_command('decline', 'groups_group_calendar_event_decline')
g.custom_command('delta', 'groups_group_calendar_event_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_event_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_event_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_event_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_event_tentatively_accept')
with self.command_group(
'groups group-calendar-event-attachment',
groups_group_calendar_event_attachment,
client_factory=cf_group_calendar_event_attachment,
) as g:
g.custom_command('create-upload-session', 'groups_group_calendar_event_attachment_create_upload_session')
with self.command_group(
'groups group-calendar-event-calendar',
groups_group_calendar_event_calendar,
client_factory=cf_group_calendar_event_calendar,
) as g:
g.custom_command(
'allowed-calendar-sharing-role', 'groups_group_calendar_event_calendar_allowed_calendar_sharing_role'
)
g.custom_command('get-schedule', 'groups_group_calendar_event_calendar_get_schedule')
with self.command_group(
'groups group-calendar-event-instance',
groups_group_calendar_event_instance,
client_factory=cf_group_calendar_event_instance,
) as g:
g.custom_command('accept', 'groups_group_calendar_event_instance_accept')
g.custom_command('cancel', 'groups_group_calendar_event_instance_cancel')
g.custom_command('decline', 'groups_group_calendar_event_instance_decline')
g.custom_command('delta', 'groups_group_calendar_event_instance_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_event_instance_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_event_instance_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_event_instance_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_event_instance_tentatively_accept')
with self.command_group(
'groups group-calendar-view', groups_group_calendar_view, client_factory=cf_group_calendar_view
) as g:
g.custom_command('accept', 'groups_group_calendar_view_accept')
g.custom_command('cancel', 'groups_group_calendar_view_cancel')
g.custom_command('decline', 'groups_group_calendar_view_decline')
g.custom_command('delta', 'groups_group_calendar_view_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_view_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_view_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_view_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_view_tentatively_accept')
with self.command_group(
'groups group-calendar-view-attachment',
groups_group_calendar_view_attachment,
client_factory=cf_group_calendar_view_attachment,
) as g:
g.custom_command('create-upload-session', 'groups_group_calendar_view_attachment_create_upload_session')
with self.command_group(
'groups group-calendar-view-calendar',
groups_group_calendar_view_calendar,
client_factory=cf_group_calendar_view_calendar,
) as g:
g.custom_command(
'allowed-calendar-sharing-role', 'groups_group_calendar_view_calendar_allowed_calendar_sharing_role'
)
g.custom_command('get-schedule', 'groups_group_calendar_view_calendar_get_schedule')
with self.command_group(
'groups group-calendar-view-calendar-calendar-view',
groups_group_calendar_view_calendar_calendar_view,
client_factory=cf_group_calendar_view_calendar_calendar_view,
) as g:
g.custom_command('accept', 'groups_group_calendar_view_calendar_calendar_view_accept')
g.custom_command('cancel', 'groups_group_calendar_view_calendar_calendar_view_cancel')
g.custom_command('decline', 'groups_group_calendar_view_calendar_calendar_view_decline')
g.custom_command('delta', 'groups_group_calendar_view_calendar_calendar_view_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_view_calendar_calendar_view_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_view_calendar_calendar_view_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_view_calendar_calendar_view_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_view_calendar_calendar_view_tentatively_accept')
with self.command_group(
'groups group-calendar-view-calendar-event',
groups_group_calendar_view_calendar_event,
client_factory=cf_group_calendar_view_calendar_event,
) as g:
g.custom_command('accept', 'groups_group_calendar_view_calendar_event_accept')
g.custom_command('cancel', 'groups_group_calendar_view_calendar_event_cancel')
g.custom_command('decline', 'groups_group_calendar_view_calendar_event_decline')
g.custom_command('delta', 'groups_group_calendar_view_calendar_event_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_view_calendar_event_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_view_calendar_event_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_view_calendar_event_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_view_calendar_event_tentatively_accept')
with self.command_group(
'groups group-calendar-view-instance',
groups_group_calendar_view_instance,
client_factory=cf_group_calendar_view_instance,
) as g:
g.custom_command('accept', 'groups_group_calendar_view_instance_accept')
g.custom_command('cancel', 'groups_group_calendar_view_instance_cancel')
g.custom_command('decline', 'groups_group_calendar_view_instance_decline')
g.custom_command('delta', 'groups_group_calendar_view_instance_delta')
g.custom_command('dismiss-reminder', 'groups_group_calendar_view_instance_dismiss_reminder')
g.custom_command('forward', 'groups_group_calendar_view_instance_forward')
g.custom_command('snooze-reminder', 'groups_group_calendar_view_instance_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_calendar_view_instance_tentatively_accept')
with self.command_group(
'groups group-conversation', groups_group_conversation, client_factory=cf_group_conversation
) as g:
g.custom_command('create-thread', 'groups_group_conversation_create_thread')
g.custom_command('delete-thread', 'groups_group_conversation_delete_thread')
g.custom_command('list-thread', 'groups_group_conversation_list_thread')
g.custom_command('show-thread', 'groups_group_conversation_show_thread')
g.custom_command('update-thread', 'groups_group_conversation_update_thread')
with self.command_group(
'groups group-conversation-thread',
groups_group_conversation_thread,
client_factory=cf_group_conversation_thread,
) as g:
g.custom_command('create-post', 'groups_group_conversation_thread_create_post')
g.custom_command('delete-post', 'groups_group_conversation_thread_delete_post')
g.custom_command('list-post', 'groups_group_conversation_thread_list_post')
g.custom_command('reply', 'groups_group_conversation_thread_reply')
g.custom_command('show-post', 'groups_group_conversation_thread_show_post')
g.custom_command('update-post', 'groups_group_conversation_thread_update_post')
with self.command_group(
'groups group-conversation-thread-post',
groups_group_conversation_thread_post,
client_factory=cf_group_conversation_thread_post,
) as g:
g.custom_command('create-attachment', 'groups_group_conversation_thread_post_create_attachment')
g.custom_command('create-extension', 'groups_group_conversation_thread_post_create_extension')
g.custom_command(
'create-multi-value-extended-property',
'groups_group_conversation_thread_post_create_multi_value_extended_property',
)
g.custom_command(
'create-single-value-extended-property',
'groups_group_conversation_thread_post_create_single_value_extended_property',
)
g.custom_command('delete-attachment', 'groups_group_conversation_thread_post_delete_attachment')
g.custom_command('delete-extension', 'groups_group_conversation_thread_post_delete_extension')
g.custom_command('delete-in-reply-to', 'groups_group_conversation_thread_post_delete_in_reply_to')
g.custom_command(
'delete-multi-value-extended-property',
'groups_group_conversation_thread_post_delete_multi_value_extended_property',
)
g.custom_command(
'delete-single-value-extended-property',
'groups_group_conversation_thread_post_delete_single_value_extended_property',
)
g.custom_command('forward', 'groups_group_conversation_thread_post_forward')
g.custom_command('list-attachment', 'groups_group_conversation_thread_post_list_attachment')
g.custom_command('list-extension', 'groups_group_conversation_thread_post_list_extension')
g.custom_command(
'list-multi-value-extended-property',
'groups_group_conversation_thread_post_list_multi_value_extended_property',
)
g.custom_command(
'list-single-value-extended-property',
'groups_group_conversation_thread_post_list_single_value_extended_property',
)
g.custom_command('reply', 'groups_group_conversation_thread_post_reply')
g.custom_command('show-attachment', 'groups_group_conversation_thread_post_show_attachment')
g.custom_command('show-extension', 'groups_group_conversation_thread_post_show_extension')
g.custom_command('show-in-reply-to', 'groups_group_conversation_thread_post_show_in_reply_to')
g.custom_command(
'show-multi-value-extended-property',
'groups_group_conversation_thread_post_show_multi_value_extended_property',
)
g.custom_command(
'show-single-value-extended-property',
'groups_group_conversation_thread_post_show_single_value_extended_property',
)
g.custom_command('update-attachment', 'groups_group_conversation_thread_post_update_attachment')
g.custom_command('update-extension', 'groups_group_conversation_thread_post_update_extension')
g.custom_command('update-in-reply-to', 'groups_group_conversation_thread_post_update_in_reply_to')
g.custom_command(
'update-multi-value-extended-property',
'groups_group_conversation_thread_post_update_multi_value_extended_property',
)
g.custom_command(
'update-single-value-extended-property',
'groups_group_conversation_thread_post_update_single_value_extended_property',
)
with self.command_group(
'groups group-conversation-thread-post-attachment',
groups_group_conversation_thread_post_attachment,
client_factory=cf_group_conversation_thread_post_attachment,
) as g:
g.custom_command(
'create-upload-session', 'groups_group_conversation_thread_post_attachment_create_upload_session'
)
with self.command_group(
'groups group-conversation-thread-post-in-reply-to',
groups_group_conversation_thread_post_in_reply_to,
client_factory=cf_group_conversation_thread_post_in_reply_to,
) as g:
g.custom_command('forward', 'groups_group_conversation_thread_post_in_reply_to_forward')
g.custom_command('reply', 'groups_group_conversation_thread_post_in_reply_to_reply')
with self.command_group('groups group-event', groups_group_event, client_factory=cf_group_event) as g:
g.custom_command('accept', 'groups_group_event_accept')
g.custom_command('cancel', 'groups_group_event_cancel')
g.custom_command('decline', 'groups_group_event_decline')
g.custom_command('delta', 'groups_group_event_delta')
g.custom_command('dismiss-reminder', 'groups_group_event_dismiss_reminder')
g.custom_command('forward', 'groups_group_event_forward')
g.custom_command('snooze-reminder', 'groups_group_event_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_event_tentatively_accept')
with self.command_group(
'groups group-event-attachment', groups_group_event_attachment, client_factory=cf_group_event_attachment
) as g:
g.custom_command('create-upload-session', 'groups_group_event_attachment_create_upload_session')
with self.command_group(
'groups group-event-calendar', groups_group_event_calendar, client_factory=cf_group_event_calendar
) as g:
g.custom_command('allowed-calendar-sharing-role', 'groups_group_event_calendar_allowed_calendar_sharing_role')
g.custom_command('get-schedule', 'groups_group_event_calendar_get_schedule')
with self.command_group(
'groups group-event-calendar-calendar-view',
groups_group_event_calendar_calendar_view,
client_factory=cf_group_event_calendar_calendar_view,
) as g:
g.custom_command('accept', 'groups_group_event_calendar_calendar_view_accept')
g.custom_command('cancel', 'groups_group_event_calendar_calendar_view_cancel')
g.custom_command('decline', 'groups_group_event_calendar_calendar_view_decline')
g.custom_command('delta', 'groups_group_event_calendar_calendar_view_delta')
g.custom_command('dismiss-reminder', 'groups_group_event_calendar_calendar_view_dismiss_reminder')
g.custom_command('forward', 'groups_group_event_calendar_calendar_view_forward')
g.custom_command('snooze-reminder', 'groups_group_event_calendar_calendar_view_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_event_calendar_calendar_view_tentatively_accept')
with self.command_group(
'groups group-event-calendar-event',
groups_group_event_calendar_event,
client_factory=cf_group_event_calendar_event,
) as g:
g.custom_command('accept', 'groups_group_event_calendar_event_accept')
g.custom_command('cancel', 'groups_group_event_calendar_event_cancel')
g.custom_command('decline', 'groups_group_event_calendar_event_decline')
g.custom_command('delta', 'groups_group_event_calendar_event_delta')
g.custom_command('dismiss-reminder', 'groups_group_event_calendar_event_dismiss_reminder')
g.custom_command('forward', 'groups_group_event_calendar_event_forward')
g.custom_command('snooze-reminder', 'groups_group_event_calendar_event_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_event_calendar_event_tentatively_accept')
with self.command_group(
'groups group-event-instance', groups_group_event_instance, client_factory=cf_group_event_instance
) as g:
g.custom_command('accept', 'groups_group_event_instance_accept')
g.custom_command('cancel', 'groups_group_event_instance_cancel')
g.custom_command('decline', 'groups_group_event_instance_decline')
g.custom_command('delta', 'groups_group_event_instance_delta')
g.custom_command('dismiss-reminder', 'groups_group_event_instance_dismiss_reminder')
g.custom_command('forward', 'groups_group_event_instance_forward')
g.custom_command('snooze-reminder', 'groups_group_event_instance_snooze_reminder')
g.custom_command('tentatively-accept', 'groups_group_event_instance_tentatively_accept')
with self.command_group('groups group-group', groups_group_group, client_factory=cf_group_group) as g:
g.custom_command('create-group', 'groups_group_group_create_group')
g.custom_command('delete-group', 'groups_group_group_delete_group')
g.custom_command('list-group', 'groups_group_group_list_group')
g.custom_command('show-group', 'groups_group_group_show_group')
g.custom_command('update-group', 'groups_group_group_update_group')
with self.command_group(
'groups group-lifecycle-policy', groups_group_lifecycle_policy, client_factory=cf_group_lifecycle_policy
) as g:
g.custom_command('add-group', 'groups_group_lifecycle_policy_add_group')
g.custom_command('remove-group', 'groups_group_lifecycle_policy_remove_group')
with self.command_group(
'groups group-lifecycle-policy-group-lifecycle-policy',
groups_group_lifecycle_policy_group_lifecycle_policy,
client_factory=cf_group_lifecycle_policy_group_lifecycle_policy,
) as g:
g.custom_command(
'create-group-lifecycle-policy',
'groups_group_lifecycle_policy_group_lifecycle_policy_create_group_lifecycle_policy',
)
g.custom_command(
'delete-group-lifecycle-policy',
'groups_group_lifecycle_policy_group_lifecycle_policy_delete_group_lifecycle_policy',
)
g.custom_command(
'list-group-lifecycle-policy',
'groups_group_lifecycle_policy_group_lifecycle_policy_list_group_lifecycle_policy',
)
g.custom_command(
'show-group-lifecycle-policy',
'groups_group_lifecycle_policy_group_lifecycle_policy_show_group_lifecycle_policy',
)
g.custom_command(
'update-group-lifecycle-policy',
'groups_group_lifecycle_policy_group_lifecycle_policy_update_group_lifecycle_policy',
)
with self.command_group(
'groups group-onenote-notebook', groups_group_onenote_notebook, client_factory=cf_group_onenote_notebook
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_notebook_copy_notebook')
g.custom_command('get-notebook-from-web-url', 'groups_group_onenote_notebook_get_notebook_from_web_url')
g.custom_command('show-recent-notebook', 'groups_group_onenote_notebook_show_recent_notebook')
with self.command_group(
'groups group-onenote-notebook-section',
groups_group_onenote_notebook_section,
client_factory=cf_group_onenote_notebook_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_notebook_section_copy_to_notebook')
g.custom_command('copy-to-section-group', 'groups_group_onenote_notebook_section_copy_to_section_group')
with self.command_group(
'groups group-onenote-notebook-section-group-parent-notebook',
groups_group_onenote_notebook_section_group_parent_notebook,
client_factory=cf_group_onenote_notebook_section_group_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_notebook_section_group_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-notebook-section-group-section',
groups_group_onenote_notebook_section_group_section,
client_factory=cf_group_onenote_notebook_section_group_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_notebook_section_group_section_copy_to_notebook')
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_notebook_section_group_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-notebook-section-group-section-page',
groups_group_onenote_notebook_section_group_section_page,
client_factory=cf_group_onenote_notebook_section_group_section_page,
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_notebook_section_group_section_page_copy_to_section')
g.custom_command(
'onenote-patch-content', 'groups_group_onenote_notebook_section_group_section_page_onenote_patch_content'
)
g.custom_command('preview', 'groups_group_onenote_notebook_section_group_section_page_preview')
with self.command_group(
'groups group-onenote-notebook-section-group-section-page-parent-notebook',
groups_group_onenote_notebook_section_group_section_page_parent_notebook,
client_factory=cf_group_onenote_notebook_section_group_section_page_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_notebook_section_group_section_page_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-notebook-section-group-section-page-parent-section',
groups_group_onenote_notebook_section_group_section_page_parent_section,
client_factory=cf_group_onenote_notebook_section_group_section_page_parent_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_notebook_section_group_section_page_parent_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_notebook_section_group_section_page_parent_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-notebook-section-group-section-parent-notebook',
groups_group_onenote_notebook_section_group_section_parent_notebook,
client_factory=cf_group_onenote_notebook_section_group_section_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_notebook_section_group_section_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-notebook-section-page',
groups_group_onenote_notebook_section_page,
client_factory=cf_group_onenote_notebook_section_page,
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_notebook_section_page_copy_to_section')
g.custom_command('onenote-patch-content', 'groups_group_onenote_notebook_section_page_onenote_patch_content')
g.custom_command('preview', 'groups_group_onenote_notebook_section_page_preview')
with self.command_group(
'groups group-onenote-notebook-section-page-parent-notebook',
groups_group_onenote_notebook_section_page_parent_notebook,
client_factory=cf_group_onenote_notebook_section_page_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_notebook_section_page_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-notebook-section-page-parent-section',
groups_group_onenote_notebook_section_page_parent_section,
client_factory=cf_group_onenote_notebook_section_page_parent_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_notebook_section_page_parent_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_notebook_section_page_parent_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-notebook-section-parent-notebook',
groups_group_onenote_notebook_section_parent_notebook,
client_factory=cf_group_onenote_notebook_section_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_notebook_section_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-notebook-section-parent-section-group-parent-notebook',
groups_group_onenote_notebook_section_parent_section_group_parent_notebook,
client_factory=cf_group_onenote_notebook_section_parent_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_notebook_section_parent_section_group_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-notebook-section-parent-section-group-section',
groups_group_onenote_notebook_section_parent_section_group_section,
client_factory=cf_group_onenote_notebook_section_parent_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_notebook_section_parent_section_group_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_notebook_section_parent_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-page', groups_group_onenote_page, client_factory=cf_group_onenote_page
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_page_copy_to_section')
g.custom_command('onenote-patch-content', 'groups_group_onenote_page_onenote_patch_content')
g.custom_command('preview', 'groups_group_onenote_page_preview')
with self.command_group(
'groups group-onenote-page-parent-notebook',
groups_group_onenote_page_parent_notebook,
client_factory=cf_group_onenote_page_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_page_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-page-parent-notebook-section',
groups_group_onenote_page_parent_notebook_section,
client_factory=cf_group_onenote_page_parent_notebook_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_page_parent_notebook_section_copy_to_notebook')
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_page_parent_notebook_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-page-parent-notebook-section-group-parent-notebook',
groups_group_onenote_page_parent_notebook_section_group_parent_notebook,
client_factory=cf_group_onenote_page_parent_notebook_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_page_parent_notebook_section_group_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-page-parent-notebook-section-group-section',
groups_group_onenote_page_parent_notebook_section_group_section,
client_factory=cf_group_onenote_page_parent_notebook_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_page_parent_notebook_section_group_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_page_parent_notebook_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-page-parent-notebook-section-group-section-page',
groups_group_onenote_page_parent_notebook_section_group_section_page,
client_factory=cf_group_onenote_page_parent_notebook_section_group_section_page,
) as g:
g.custom_command(
'copy-to-section', 'groups_group_onenote_page_parent_notebook_section_group_section_page_copy_to_section'
)
g.custom_command(
'onenote-patch-content',
'groups_group_onenote_page_parent_notebook_section_group_section_page_onenote_patch_content',
)
g.custom_command('preview', 'groups_group_onenote_page_parent_notebook_section_group_section_page_preview')
with self.command_group(
'groups group-onenote-page-parent-notebook-section-group-section-parent-notebook',
groups_group_onenote_page_parent_notebook_section_group_section_parent_notebook,
client_factory=cf_group_onenote_page_parent_notebook_section_group_section_parent_notebook,
) as g:
g.custom_command(
'copy-notebook',
'groups_group_onenote_page_parent_notebook_section_group_section_parent_notebook_copy_notebook',
)
with self.command_group(
'groups group-onenote-page-parent-notebook-section-page',
groups_group_onenote_page_parent_notebook_section_page,
client_factory=cf_group_onenote_page_parent_notebook_section_page,
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_page_parent_notebook_section_page_copy_to_section')
g.custom_command(
'onenote-patch-content', 'groups_group_onenote_page_parent_notebook_section_page_onenote_patch_content'
)
g.custom_command('preview', 'groups_group_onenote_page_parent_notebook_section_page_preview')
with self.command_group(
'groups group-onenote-page-parent-notebook-section-parent-notebook',
groups_group_onenote_page_parent_notebook_section_parent_notebook,
client_factory=cf_group_onenote_page_parent_notebook_section_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_page_parent_notebook_section_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-page-parent-notebook-section-parent-section-group-parent-notebook',
groups_group_onenote_page_parent_notebook_section_parent_section_group_parent_notebook,
client_factory=cf_group_onenote_page_parent_notebook_section_parent_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook',
'groups_group_onenote_page_parent_notebook_section_parent_section_group_parent_notebook_copy_notebook',
)
with self.command_group(
'groups group-onenote-page-parent-notebook-section-parent-section-group-section',
groups_group_onenote_page_parent_notebook_section_parent_section_group_section,
client_factory=cf_group_onenote_page_parent_notebook_section_parent_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_page_parent_notebook_section_parent_section_group_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_page_parent_notebook_section_parent_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-page-parent-section',
groups_group_onenote_page_parent_section,
client_factory=cf_group_onenote_page_parent_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_page_parent_section_copy_to_notebook')
g.custom_command('copy-to-section-group', 'groups_group_onenote_page_parent_section_copy_to_section_group')
with self.command_group(
'groups group-onenote-page-parent-section-page',
groups_group_onenote_page_parent_section_page,
client_factory=cf_group_onenote_page_parent_section_page,
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_page_parent_section_page_copy_to_section')
g.custom_command('onenote-patch-content', 'groups_group_onenote_page_parent_section_page_onenote_patch_content')
g.custom_command('preview', 'groups_group_onenote_page_parent_section_page_preview')
with self.command_group(
'groups group-onenote-page-parent-section-parent-notebook',
groups_group_onenote_page_parent_section_parent_notebook,
client_factory=cf_group_onenote_page_parent_section_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_page_parent_section_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-page-parent-section-parent-notebook-section',
groups_group_onenote_page_parent_section_parent_notebook_section,
client_factory=cf_group_onenote_page_parent_section_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_page_parent_section_parent_notebook_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_page_parent_section_parent_notebook_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-page-parent-section-parent-notebook-section-group-parent-notebook',
groups_group_onenote_page_parent_section_parent_notebook_section_group_parent_notebook,
client_factory=cf_group_onenote_page_parent_section_parent_notebook_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook',
'groups_group_onenote_page_parent_section_parent_notebook_section_group_parent_notebook_copy_notebook',
)
with self.command_group(
'groups group-onenote-page-parent-section-parent-notebook-section-group-section',
groups_group_onenote_page_parent_section_parent_notebook_section_group_section,
client_factory=cf_group_onenote_page_parent_section_parent_notebook_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_page_parent_section_parent_notebook_section_group_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_page_parent_section_parent_notebook_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-page-parent-section-parent-section-group-parent-notebook',
groups_group_onenote_page_parent_section_parent_section_group_parent_notebook,
client_factory=cf_group_onenote_page_parent_section_parent_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook',
'groups_group_onenote_page_parent_section_parent_section_group_parent_notebook_copy_notebook',
)
with self.command_group(
'groups group-onenote-page-parent-section-parent-section-group-parent-notebook-section',
groups_group_onenote_page_parent_section_parent_section_group_parent_notebook_section,
client_factory=cf_group_onenote_page_parent_section_parent_section_group_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_page_parent_section_parent_section_group_parent_notebook_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_page_parent_section_parent_section_group_parent_notebook_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-page-parent-section-parent-section-group-section',
groups_group_onenote_page_parent_section_parent_section_group_section,
client_factory=cf_group_onenote_page_parent_section_parent_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_page_parent_section_parent_section_group_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_page_parent_section_parent_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section', groups_group_onenote_section, client_factory=cf_group_onenote_section
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_section_copy_to_notebook')
g.custom_command('copy-to-section-group', 'groups_group_onenote_section_copy_to_section_group')
with self.command_group(
'groups group-onenote-section-group-parent-notebook',
groups_group_onenote_section_group_parent_notebook,
client_factory=cf_group_onenote_section_group_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_section_group_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-section-group-parent-notebook-section',
groups_group_onenote_section_group_parent_notebook_section,
client_factory=cf_group_onenote_section_group_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_section_group_parent_notebook_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_section_group_parent_notebook_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-section-group-parent-notebook-section-page',
groups_group_onenote_section_group_parent_notebook_section_page,
client_factory=cf_group_onenote_section_group_parent_notebook_section_page,
) as g:
g.custom_command(
'copy-to-section', 'groups_group_onenote_section_group_parent_notebook_section_page_copy_to_section'
)
g.custom_command(
'onenote-patch-content',
'groups_group_onenote_section_group_parent_notebook_section_page_onenote_patch_content',
)
g.custom_command('preview', 'groups_group_onenote_section_group_parent_notebook_section_page_preview')
with self.command_group(
'groups group-onenote-section-group-parent-notebook-section-page-parent-notebook',
groups_group_onenote_section_group_parent_notebook_section_page_parent_notebook,
client_factory=cf_group_onenote_section_group_parent_notebook_section_page_parent_notebook,
) as g:
g.custom_command(
'copy-notebook',
'groups_group_onenote_section_group_parent_notebook_section_page_parent_notebook_copy_notebook',
)
with self.command_group(
'groups group-onenote-section-group-parent-notebook-section-page-parent-section',
groups_group_onenote_section_group_parent_notebook_section_page_parent_section,
client_factory=cf_group_onenote_section_group_parent_notebook_section_page_parent_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_section_group_parent_notebook_section_page_parent_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_group_parent_notebook_section_page_parent_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-group-parent-notebook-section-parent-notebook',
groups_group_onenote_section_group_parent_notebook_section_parent_notebook,
client_factory=cf_group_onenote_section_group_parent_notebook_section_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_section_group_parent_notebook_section_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-section-group-section',
groups_group_onenote_section_group_section,
client_factory=cf_group_onenote_section_group_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_section_group_section_copy_to_notebook')
g.custom_command('copy-to-section-group', 'groups_group_onenote_section_group_section_copy_to_section_group')
with self.command_group(
'groups group-onenote-section-group-section-page',
groups_group_onenote_section_group_section_page,
client_factory=cf_group_onenote_section_group_section_page,
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_section_group_section_page_copy_to_section')
g.custom_command(
'onenote-patch-content', 'groups_group_onenote_section_group_section_page_onenote_patch_content'
)
g.custom_command('preview', 'groups_group_onenote_section_group_section_page_preview')
with self.command_group(
'groups group-onenote-section-group-section-page-parent-notebook',
groups_group_onenote_section_group_section_page_parent_notebook,
client_factory=cf_group_onenote_section_group_section_page_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_section_group_section_page_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-section-group-section-page-parent-notebook-section',
groups_group_onenote_section_group_section_page_parent_notebook_section,
client_factory=cf_group_onenote_section_group_section_page_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_section_group_section_page_parent_notebook_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_group_section_page_parent_notebook_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-group-section-page-parent-section',
groups_group_onenote_section_group_section_page_parent_section,
client_factory=cf_group_onenote_section_group_section_page_parent_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_section_group_section_page_parent_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_group_section_page_parent_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-group-section-parent-notebook',
groups_group_onenote_section_group_section_parent_notebook,
client_factory=cf_group_onenote_section_group_section_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_section_group_section_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-section-group-section-parent-notebook-section',
groups_group_onenote_section_group_section_parent_notebook_section,
client_factory=cf_group_onenote_section_group_section_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_section_group_section_parent_notebook_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_group_section_parent_notebook_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-page',
groups_group_onenote_section_page,
client_factory=cf_group_onenote_section_page,
) as g:
g.custom_command('copy-to-section', 'groups_group_onenote_section_page_copy_to_section')
g.custom_command('onenote-patch-content', 'groups_group_onenote_section_page_onenote_patch_content')
g.custom_command('preview', 'groups_group_onenote_section_page_preview')
with self.command_group(
'groups group-onenote-section-page-parent-notebook',
groups_group_onenote_section_page_parent_notebook,
client_factory=cf_group_onenote_section_page_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_section_page_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-section-page-parent-notebook-section',
groups_group_onenote_section_page_parent_notebook_section,
client_factory=cf_group_onenote_section_page_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_section_page_parent_notebook_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_section_page_parent_notebook_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-section-page-parent-notebook-section-group-parent-notebook',
groups_group_onenote_section_page_parent_notebook_section_group_parent_notebook,
client_factory=cf_group_onenote_section_page_parent_notebook_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook',
'groups_group_onenote_section_page_parent_notebook_section_group_parent_notebook_copy_notebook',
)
with self.command_group(
'groups group-onenote-section-page-parent-notebook-section-group-section',
groups_group_onenote_section_page_parent_notebook_section_group_section,
client_factory=cf_group_onenote_section_page_parent_notebook_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_section_page_parent_notebook_section_group_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_page_parent_notebook_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-page-parent-section',
groups_group_onenote_section_page_parent_section,
client_factory=cf_group_onenote_section_page_parent_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_section_page_parent_section_copy_to_notebook')
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_section_page_parent_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-section-parent-notebook',
groups_group_onenote_section_parent_notebook,
client_factory=cf_group_onenote_section_parent_notebook,
) as g:
g.custom_command('copy-notebook', 'groups_group_onenote_section_parent_notebook_copy_notebook')
with self.command_group(
'groups group-onenote-section-parent-notebook-section',
groups_group_onenote_section_parent_notebook_section,
client_factory=cf_group_onenote_section_parent_notebook_section,
) as g:
g.custom_command('copy-to-notebook', 'groups_group_onenote_section_parent_notebook_section_copy_to_notebook')
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_section_parent_notebook_section_copy_to_section_group'
)
with self.command_group(
'groups group-onenote-section-parent-notebook-section-group-parent-notebook',
groups_group_onenote_section_parent_notebook_section_group_parent_notebook,
client_factory=cf_group_onenote_section_parent_notebook_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_section_parent_notebook_section_group_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-section-parent-notebook-section-group-section',
groups_group_onenote_section_parent_notebook_section_group_section,
client_factory=cf_group_onenote_section_parent_notebook_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_section_parent_notebook_section_group_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_parent_notebook_section_group_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-parent-section-group-parent-notebook',
groups_group_onenote_section_parent_section_group_parent_notebook,
client_factory=cf_group_onenote_section_parent_section_group_parent_notebook,
) as g:
g.custom_command(
'copy-notebook', 'groups_group_onenote_section_parent_section_group_parent_notebook_copy_notebook'
)
with self.command_group(
'groups group-onenote-section-parent-section-group-parent-notebook-section',
groups_group_onenote_section_parent_section_group_parent_notebook_section,
client_factory=cf_group_onenote_section_parent_section_group_parent_notebook_section,
) as g:
g.custom_command(
'copy-to-notebook',
'groups_group_onenote_section_parent_section_group_parent_notebook_section_copy_to_notebook',
)
g.custom_command(
'copy-to-section-group',
'groups_group_onenote_section_parent_section_group_parent_notebook_section_copy_to_section_group',
)
with self.command_group(
'groups group-onenote-section-parent-section-group-section',
groups_group_onenote_section_parent_section_group_section,
client_factory=cf_group_onenote_section_parent_section_group_section,
) as g:
g.custom_command(
'copy-to-notebook', 'groups_group_onenote_section_parent_section_group_section_copy_to_notebook'
)
g.custom_command(
'copy-to-section-group', 'groups_group_onenote_section_parent_section_group_section_copy_to_section_group'
)
with self.command_group('groups group-thread', groups_group_thread, client_factory=cf_group_thread) as g:
g.custom_command('create-post', 'groups_group_thread_create_post')
g.custom_command('delete-post', 'groups_group_thread_delete_post')
g.custom_command('list-post', 'groups_group_thread_list_post')
g.custom_command('reply', 'groups_group_thread_reply')
g.custom_command('show-post', 'groups_group_thread_show_post')
g.custom_command('update-post', 'groups_group_thread_update_post')
with self.command_group(
'groups group-thread-post', groups_group_thread_post, client_factory=cf_group_thread_post
) as g:
g.custom_command('create-attachment', 'groups_group_thread_post_create_attachment')
g.custom_command('create-extension', 'groups_group_thread_post_create_extension')
g.custom_command(
'create-multi-value-extended-property', 'groups_group_thread_post_create_multi_value_extended_property'
)
g.custom_command(
'create-single-value-extended-property', 'groups_group_thread_post_create_single_value_extended_property'
)
g.custom_command('delete-attachment', 'groups_group_thread_post_delete_attachment')
g.custom_command('delete-extension', 'groups_group_thread_post_delete_extension')
g.custom_command('delete-in-reply-to', 'groups_group_thread_post_delete_in_reply_to')
g.custom_command(
'delete-multi-value-extended-property', 'groups_group_thread_post_delete_multi_value_extended_property'
)
g.custom_command(
'delete-single-value-extended-property', 'groups_group_thread_post_delete_single_value_extended_property'
)
g.custom_command('forward', 'groups_group_thread_post_forward')
g.custom_command('list-attachment', 'groups_group_thread_post_list_attachment')
g.custom_command('list-extension', 'groups_group_thread_post_list_extension')
g.custom_command(
'list-multi-value-extended-property', 'groups_group_thread_post_list_multi_value_extended_property'
)
g.custom_command(
'list-single-value-extended-property', 'groups_group_thread_post_list_single_value_extended_property'
)
g.custom_command('reply', 'groups_group_thread_post_reply')
g.custom_command('show-attachment', 'groups_group_thread_post_show_attachment')
g.custom_command('show-extension', 'groups_group_thread_post_show_extension')
g.custom_command('show-in-reply-to', 'groups_group_thread_post_show_in_reply_to')
g.custom_command(
'show-multi-value-extended-property', 'groups_group_thread_post_show_multi_value_extended_property'
)
g.custom_command(
'show-single-value-extended-property', 'groups_group_thread_post_show_single_value_extended_property'
)
g.custom_command('update-attachment', 'groups_group_thread_post_update_attachment')
g.custom_command('update-extension', 'groups_group_thread_post_update_extension')
g.custom_command('update-in-reply-to', 'groups_group_thread_post_update_in_reply_to')
g.custom_command(
'update-multi-value-extended-property', 'groups_group_thread_post_update_multi_value_extended_property'
)
g.custom_command(
'update-single-value-extended-property', 'groups_group_thread_post_update_single_value_extended_property'
)
with self.command_group(
'groups group-thread-post-attachment',
groups_group_thread_post_attachment,
client_factory=cf_group_thread_post_attachment,
) as g:
g.custom_command('create-upload-session', 'groups_group_thread_post_attachment_create_upload_session')
with self.command_group(
'groups group-thread-post-in-reply-to',
groups_group_thread_post_in_reply_to,
client_factory=cf_group_thread_post_in_reply_to,
) as g:
g.custom_command('forward', 'groups_group_thread_post_in_reply_to_forward')
g.custom_command('reply', 'groups_group_thread_post_in_reply_to_reply')
with self.command_group('groups', is_experimental=True):
pass
| 54.276237
| 248
| 0.807692
| 11,731
| 99,814
| 6.267923
| 0.02276
| 0.098886
| 0.0714
| 0.05168
| 0.891675
| 0.862136
| 0.8297
| 0.795033
| 0.769588
| 0.726285
| 0
| 0
| 0.122458
| 99,814
| 1,838
| 249
| 54.305767
| 0.839458
| 0.005711
| 0
| 0.309245
| 0
| 0.001953
| 0.461131
| 0.417545
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000651
| false
| 0.000651
| 0.001302
| 0
| 0.001953
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90a5f75240a41a5aa782800ec53d17d420f123ee
| 8,356
|
py
|
Python
|
IPFIX_visualization/models.py
|
WKobes/ipvix
|
e8571a45088209812971fb476d6b491141cce9ea
|
[
"MIT"
] | null | null | null |
IPFIX_visualization/models.py
|
WKobes/ipvix
|
e8571a45088209812971fb476d6b491141cce9ea
|
[
"MIT"
] | null | null | null |
IPFIX_visualization/models.py
|
WKobes/ipvix
|
e8571a45088209812971fb476d6b491141cce9ea
|
[
"MIT"
] | null | null | null |
from django.db import models
from datetime import datetime
from operator import itemgetter
class Visualization(models.Model):
    """A visualization method available for an enterprise.

    ``id`` and ``enterprise_id`` are unique together, so the same
    visualization id may exist independently for different enterprises.
    """

    id = models.AutoField(primary_key=True)
    # enterprise this visualization belongs to -- presumably an IPFIX
    # enterprise number; confirm against the data-loading code
    enterprise_id = models.IntegerField()
    name = models.CharField(max_length=255)
    description = models.CharField(max_length=255, blank=True)
    # presumably the name of the plotting function to invoke (see the
    # ``get_*`` static methods on TypeToVisualization) -- verify with callers
    function = models.CharField(max_length=255, blank=True)

    class Meta:
        unique_together = ("id", "enterprise_id")

    def __str__(self):
        return self.name
class Type(models.Model):
    """A data type (measured quantity) known for an enterprise.

    ``id`` and ``enterprise_id`` are unique together, mirroring
    :class:`Visualization`.
    """

    id = models.AutoField(primary_key=True)
    # enterprise this type belongs to -- presumably an IPFIX enterprise
    # number; confirm against the data-loading code
    enterprise_id = models.IntegerField()
    name = models.CharField(max_length=255)
    # e.g. the wire/storage datatype of the measured values -- TODO confirm
    datatype = models.CharField(max_length=255, blank=True)
    description = models.CharField(max_length=1024, blank=True)
    # measurement units of the values -- TODO confirm
    units = models.CharField(max_length=255, blank=True)

    class Meta:
        unique_together = ("id", "enterprise_id")

    def __str__(self):
        return self.name
class TypeToVisualization(models.Model):
    """Join table linking a :class:`Type` to a :class:`Visualization`.

    ``filename`` names a measurement file on disk; the static helpers below
    parse such a file into the dict structure consumed by the chart templates
    (python-nvd3 style: ``charttype`` / ``chartdata`` / ``chartcontainer`` /
    ``extra``).
    """

    id = models.AutoField(primary_key=True)
    type_enterprise_id = models.IntegerField()
    type = models.ForeignKey(Type, models.DO_NOTHING)
    visual_enterprise_id = models.IntegerField()
    visual = models.ForeignKey(Visualization, models.DO_NOTHING)
    # path of the data file to plot
    filename = models.CharField(max_length=500, blank=True)

    def __str__(self):
        # Convert the related objects explicitly: concatenating model
        # instances with ``+`` raises TypeError (bug in the original).
        return "{0} {1}".format(self.visual, self.type)

    # ------------------------------------------------------------------ #
    # internal helpers
    # ------------------------------------------------------------------ #

    @staticmethod
    def _sorted_series(xdata, ydata):
        """Sort the two parallel lists together by x value.

        The original ``zip(*sorted(zip(...)))`` idiom raised IndexError on an
        empty input file; this guard keeps empty series empty instead.
        """
        if not xdata:
            return [], []
        xs, ys = zip(*sorted(zip(xdata, ydata), key=itemgetter(0)))
        return list(xs), list(ys)

    @staticmethod
    def _chart(charttype, chartcontainer, chartdata, extra):
        """Assemble the dict structure the chart templates expect."""
        return {
            'charttype': charttype,
            'chartdata': chartdata,
            'chartcontainer': chartcontainer,
            'extra': extra,
        }

    @staticmethod
    def get_plot_numbers_time(filename=None):
        """Line chart of the integer value per line, plotted over time.

        Each input line ends in an integer value and starts with a
        ``.``-separated timestamp.  NOTE(review): ``line.split(' ')`` leaves
        only the date part in ``words[0]``, yet it is parsed with a format
        string containing a space -- confirm the file's field separator.
        """
        xdata = []
        ydata = []
        # TODO: remove this debug cap (original comment: "count verwijderen")
        count = 0
        with open(filename, 'r') as data_file:  # close the file even on errors
            for line in data_file:
                line = line[:-1]  # strip the trailing newline
                count += 1
                if count == 100000:  # debug cap: stop after ~100k lines
                    break
                words = line.split(' ')
                words[-1] = "".join(words[-1].split())  # drop embedded whitespace
                ydata.append(int(words[-1]))
                millis = words[0].split('.')[1]  # fractional-second part
                date_object = datetime.strptime(words[0].split('.')[0], '%Y-%m-%d %H:%M:%S')
                # "%s" (epoch seconds) is platform-specific (glibc/Linux)
                xdata.append(int(date_object.strftime("%s")) * 1000 + int(millis))
        xdata, ydata = TypeToVisualization._sorted_series(xdata, ydata)
        chartdata = {'x': xdata, 'y': ydata, 'name': 'Data', }
        extra = {
            'x_is_date': True,
            'x_axis_format': '%H:%M:%S',
            'tag_script_js': True,
            'jquery_on_ready': False,
            'key': 'Data',
        }
        return TypeToVisualization._chart("lineChart", 'linechart_container', chartdata, extra)

    @staticmethod
    def get_count(filename=None):
        """Discrete bar chart counting occurrences of each line's last token."""
        xdata = []
        ydata = []
        positions = {}  # token -> index; O(1) lookup instead of list.index (O(n))
        with open(filename, 'r') as data_file:
            for line in data_file:
                line = line[:-1]
                words = line.split(' ')
                token = "".join(words[-1].split())  # last field, whitespace removed
                if token in positions:
                    ydata[positions[token]] += 1
                else:
                    positions[token] = len(xdata)
                    xdata.append(token)
                    ydata.append(1)
        chartdata = {'x': xdata, 'y': ydata, 'name': 'Data', }
        extra = {
            'x_is_date': False,
            'x_axis_format': '',
            'tag_script_js': True,
            'jquery_on_ready': False,
        }
        return TypeToVisualization._chart(
            "discreteBarChart", 'discretebarchart_container', chartdata, extra)

    @staticmethod
    def get_bar_chart_not_0(filename=None):
        """Like :meth:`get_plot_numbers_time`, but zero values are skipped.

        NOTE(review): despite the name this renders a line chart, exactly as
        the original implementation did.
        """
        xdata = []
        ydata = []
        # TODO: remove this debug cap (original comment: "count verwijderen")
        count = 0
        with open(filename, 'r') as data_file:
            for line in data_file:
                line = line[:-1]
                count += 1
                if count == 100000:  # debug cap: stop after ~100k lines
                    break
                words = line.split(' ')
                words[-1] = "".join(words[-1].split())
                value = int(words[-1])
                if value != 0:  # drop zero samples
                    ydata.append(value)
                    millis = words[0].split('.')[1]
                    date_object = datetime.strptime(words[0].split('.')[0], '%Y-%m-%d %H:%M:%S')
                    # "%s" (epoch seconds) is platform-specific (glibc/Linux)
                    xdata.append(int(date_object.strftime("%s")) * 1000 + int(millis))
        xdata, ydata = TypeToVisualization._sorted_series(xdata, ydata)
        chartdata = {'x': xdata, 'y': ydata, 'name': 'Data', }
        extra = {
            'x_is_date': True,
            'x_axis_format': '%H:%M:%S',
            'tag_script_js': True,
            'jquery_on_ready': False,
        }
        return TypeToVisualization._chart("lineChart", 'linechart_container', chartdata, extra)

    @staticmethod
    def get_line_chart_sum_second(filename=None):
        """Line chart summing the per-line values within each second.

        The timestamp is taken positionally: chars 0-18 hold the date/time,
        chars 19-22 the fractional part, the value starts at char 23.
        """
        xdata = []
        ydata = []
        positions = {}  # epoch-ms -> index; O(1) lookup instead of list.index
        with open(filename, 'r') as data_file:
            for line in data_file:
                date_object = datetime.strptime(line[:19], '%Y-%m-%d %H:%M:%S')
                # "%s" (epoch seconds) is platform-specific (glibc/Linux)
                stamp_ms = int(date_object.strftime("%s")) * 1000
                value = int(line[23:-1])
                if stamp_ms in positions:
                    ydata[positions[stamp_ms]] += value
                else:
                    positions[stamp_ms] = len(xdata)
                    xdata.append(stamp_ms)
                    ydata.append(value)
        xdata, ydata = TypeToVisualization._sorted_series(xdata, ydata)
        chartdata = {'x': xdata, 'y': ydata, 'name': 'Data', }
        extra = {
            'x_is_date': True,
            'x_axis_format': '%H:%M:%S',
            'tag_script_js': True,
            'jquery_on_ready': False,
            'key': 'Data',
        }
        return TypeToVisualization._chart("lineChart", 'linechart_container', chartdata, extra)

    @staticmethod
    def get_cdf(filename=None):
        """Empirical CDF of the per-line values (value read from char 23 on)."""
        values = []
        # TODO: remove this debug cap
        count = 0
        with open(filename, 'r') as data_file:
            for line in data_file:
                count += 1
                if count > 100000:  # debug cap: process at most 100k lines
                    break
                values.append(int(line[23:-1]))
        import numpy as np  # imported lazily, as in the original module
        sorted_values = np.sort(values)  # renamed: don't shadow builtin ``sorted``
        yvals = np.arange(len(sorted_values)) / float(len(sorted_values))
        chartdata = {'x': sorted_values, 'y': yvals, 'name': 'Data'}
        extra = {
            'x_is_date': False,
            'x_axis_format': '',
            'tag_script_js': True,
            'jquery_on_ready': False,
        }
        return TypeToVisualization._chart("lineChart", 'linechart_container', chartdata, extra)

    @staticmethod
    def get_cdf_not_0(filename=None):
        """Empirical CDF of the per-line values, skipping zeros."""
        values = []
        # TODO: remove this debug cap
        count = 0
        with open(filename, 'r') as data_file:
            for line in data_file:
                count += 1
                if count > 100000:  # debug cap: process at most 100k lines
                    break
                value = int(line[23:-1])
                if value != 0:  # drop zero samples
                    values.append(value)
        import numpy as np  # imported lazily, as in the original module
        sorted_values = np.sort(values)  # renamed: don't shadow builtin ``sorted``
        yvals = np.arange(len(sorted_values)) / float(len(sorted_values))
        chartdata = {'x': sorted_values, 'y': yvals, 'name': 'Data'}
        extra = {
            'x_is_date': False,
            'x_axis_format': '',
            'tag_script_js': True,
            'jquery_on_ready': False,
        }
        return TypeToVisualization._chart("lineChart", 'linechart_container', chartdata, extra)
| 31.063197
| 92
| 0.500957
| 847
| 8,356
| 4.773318
| 0.14758
| 0.038585
| 0.035617
| 0.047489
| 0.861242
| 0.823646
| 0.810537
| 0.790502
| 0.780114
| 0.770467
| 0
| 0.022205
| 0.369435
| 8,356
| 268
| 93
| 31.179104
| 0.745113
| 0.009694
| 0
| 0.788546
| 0
| 0
| 0.109553
| 0.003144
| 0
| 0
| 0
| 0.003731
| 0
| 1
| 0.039648
| false
| 0
| 0.022026
| 0.013216
| 0.198238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90bb531f275ad647aff24888a78cc038ff2ccf1a
| 26,969
|
py
|
Python
|
perception.py
|
terraregina/BalancingControl
|
36330cc0a20ad1f2fbd3a8f87ef8fed98df3fb22
|
[
"MIT"
] | 1
|
2021-08-16T02:34:15.000Z
|
2021-08-16T02:34:15.000Z
|
perception.py
|
terraregina/BalancingControl
|
36330cc0a20ad1f2fbd3a8f87ef8fed98df3fb22
|
[
"MIT"
] | null | null | null |
perception.py
|
terraregina/BalancingControl
|
36330cc0a20ad1f2fbd3a8f87ef8fed98df3fb22
|
[
"MIT"
] | 4
|
2021-06-07T15:15:28.000Z
|
2021-11-11T13:05:12.000Z
|
from misc import ln, softmax
import numpy as np
import scipy.special as scs
from misc import D_KL_nd_dirichlet, D_KL_dirichlet_categorical
class HierarchicalPerception(object):
    def __init__(self,
                 generative_model_observations,
                 generative_model_states,
                 generative_model_rewards,
                 transition_matrix_context,
                 prior_states,
                 prior_rewards,
                 prior_policies,
                 dirichlet_pol_params = None,
                 dirichlet_rew_params = None,
                 generative_model_context = None,
                 T=5, pol_lambda=0, r_lambda=0, non_decaying=0, dec_temp=1.):
        """Store copies of the generative-model components and derived sizes.

        Parameters
        ----------
        generative_model_observations : observation likelihood array (copied).
        generative_model_states : state transition model; stored 4-D -- a
            trailing singleton context axis is appended to a <=3-D input.
        generative_model_rewards : reward likelihood; stored 3-D -- a
            singleton context axis is appended to a 2-D input.
        transition_matrix_context : context transition matrix (copied).
        prior_states, prior_rewards, prior_policies : prior distributions
            (copied); their leading dimensions define ``nh`` and ``npi``.
        dirichlet_pol_params, dirichlet_rew_params : optional Dirichlet
            concentration parameters for policy / reward learning.
        generative_model_context : optional context likelihood model.
        T : number of time steps per trial.
        pol_lambda, r_lambda : forgetting/decay rates for policy and reward
            learning -- presumably; confirm against the update methods.
        non_decaying : number of leading states exempt from decay -- TODO
            confirm against the update methods.
        dec_temp : decision temperature.
        """
        self.generative_model_observations = generative_model_observations.copy()
        # ensure a 4th (context) axis on the state transition model
        if len(generative_model_states.shape) <= 3:
            self.generative_model_states = generative_model_states.copy()[:,:,:,None]
        else:
            self.generative_model_states = generative_model_states.copy()
        self.generative_model_rewards = generative_model_rewards.copy()
        self.transition_matrix_context = transition_matrix_context.copy()
        self.prior_rewards = prior_rewards.copy()
        self.prior_states = prior_states.copy()
        self.prior_policies = prior_policies.copy()
        self.npi = prior_policies.shape[0]   # number of policies
        self.T = T
        self.nh = prior_states.shape[0]      # number of hidden states
        self.pol_lambda = pol_lambda
        self.r_lambda = r_lambda
        self.non_decaying = non_decaying
        self.dec_temp = dec_temp
        # a 3-D reward model implies multiple contexts; otherwise use a
        # single context and append a singleton axis
        # NOTE(review): ``infer_context`` is only set on the multi-context
        # branch -- confirm nothing reads it in the single-context case.
        if len(generative_model_rewards.shape) > 2:
            self.infer_context = True
            self.nc = generative_model_rewards.shape[2]
        else:
            self.nc = 1
            self.generative_model_rewards = self.generative_model_rewards[:,:,np.newaxis]
        if dirichlet_pol_params is not None:
            self.dirichlet_pol_params = dirichlet_pol_params.copy()
        if dirichlet_rew_params is not None:
            self.dirichlet_rew_params = dirichlet_rew_params.copy()
        if generative_model_context is not None:
            self.generative_model_context = generative_model_context.copy()
        # normalise the reward model from the Dirichlet counts.
        # NOTE(review): ``self.dirichlet_rew_params`` is only assigned when
        # the argument is not None, yet it is used unconditionally below --
        # confirm all callers pass ``dirichlet_rew_params``.
        for c in range(self.nc):
            for state in range(self.nh):
                self.generative_model_rewards[:,state,c] = self.dirichlet_rew_params[:,state,c] / self.dirichlet_rew_params[:,state,c].sum()
                # disabled alternative: geometric-mean normalisation via the
                # Dirichlet expectation of the log
                # self.generative_model_rewards[:,state,c] =\
                # np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
                # -scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
                # self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()
def reset(self, params, fixed):
alphas = np.zeros((self.npi, self.nc)) + params
self.generative_model_rewards[:] = fixed['rew_mod'].copy()
self.dirichlet_rew_params[:] = fixed['beta_rew'].copy()
self.prior_policies[:] = alphas / alphas.sum(axis=0)[None,:]
self.dirichlet_pol_params = alphas
def instantiate_messages(self, policies):
self.bwd_messages = np.zeros((self.nh, self.T, self.npi, self.nc))
self.bwd_messages[:,-1,:, :] = 1./self.nh
self.fwd_messages = np.zeros((self.nh, self.T, self.npi, self.nc))
self.fwd_messages[:, 0, :, :] = self.prior_states[:, np.newaxis, np.newaxis]
self.fwd_norms = np.zeros((self.T+1, self.npi, self.nc))
self.fwd_norms[0,:,:] = 1.
self.obs_messages = np.zeros((self.nh, self.T, self.nc)) + 1/self.nh#self.prior_observations.dot(self.generative_model_observations)
#self.obs_messages = np.tile(self.obs_messages,(self.T,1)).T
self.rew_messages = np.zeros((self.nh, self.T, self.nc))
#self.rew_messages[:] = np.tile(self.prior_rewards.dot(self.generative_model_rewards),(self.T,1)).T
for c in range(self.nc):
self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
for pi, cstates in enumerate(policies):
for t, u in enumerate(np.flip(cstates, axis = 0)):
tp = self.T - 2 - t
self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
self.obs_messages[:, tp+1,c]*\
self.rew_messages[:, tp+1,c]
self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
.dot(self.generative_model_states[:,:,u,c])
self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()
def update_messages(self, t, pi, cs, c=0):
if t > 0:
for i, u in enumerate(np.flip(cs[:t], axis = 0)):
self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-i,pi,c]*\
self.obs_messages[:,t-i,c]*\
self.rew_messages[:, t-i,c]
self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-1-i,pi,c]\
.dot(self.generative_model_states[:,:,u,c])
norm = self.bwd_messages[:,t-1-i,pi,c].sum()
if norm > 0:
self.bwd_messages[:,t-1-i, pi,c] /= norm
if len(cs[t:]) > 0:
for i, u in enumerate(cs[t:]):
self.fwd_messages[:, t+1+i, pi,c] = self.fwd_messages[:,t+i, pi,c]*\
self.obs_messages[:, t+i,c]*\
self.rew_messages[:, t+i,c]
self.fwd_messages[:, t+1+i, pi,c] = \
self.generative_model_states[:,:,u,c].\
dot(self.fwd_messages[:, t+1+i, pi,c])
self.fwd_norms[t+1+i,pi,c] = self.fwd_messages[:,t+1+i,pi,c].sum()
if self.fwd_norms[t+1+i, pi,c] > 0: #???? Shouldn't this not happen?
self.fwd_messages[:,t+1+i, pi,c] /= self.fwd_norms[t+1+i,pi,c]
def reset_preferences(self, t, new_preference, policies):
self.prior_rewards = new_preference.copy()
for c in range(self.nc):
self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
for pi, cstates in enumerate(policies[t:]):
for i, u in enumerate(np.flip(cstates, axis = 0)):
tp = self.T - 2 - i
self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
self.obs_messages[:, tp+1,c]*\
self.rew_messages[:, tp+1,c]
self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
.dot(self.generative_model_states[:,:,u,c])
self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()
def update_beliefs_states(self, tau, t, observation, reward, policies, possible_policies):
#estimate expected state distribution
if t == 0:
self.instantiate_messages(policies)
self.obs_messages[:,t,:] = self.generative_model_observations[observation][:,np.newaxis]
self.rew_messages[:,t,:] = self.generative_model_rewards[reward]
for c in range(self.nc):
for pi, cs in enumerate(policies):
if self.prior_policies[pi,c] > 1e-15 and pi in possible_policies:
self.update_messages(t, pi, cs, c)
else:
self.fwd_messages[:,:,pi,c] = 0#1./self.nh
#estimate posterior state distribution
posterior = self.fwd_messages*self.bwd_messages*self.obs_messages[:,:,np.newaxis,:]*self.rew_messages[:,:,np.newaxis,:]
norm = posterior.sum(axis = 0)
self.fwd_norms[-1] = norm[-1]
non_zero = norm > 0
posterior[:,non_zero] /= norm[non_zero]
return np.nan_to_num(posterior)
def update_beliefs_policies(self, tau, t):
#print((prior_policies>1e-4).sum())
likelihood = self.fwd_norms.prod(axis=0)
posterior = np.power(likelihood, self.dec_temp) * self.prior_policies
likelihood /= likelihood.sum(axis=0)[np.newaxis,:]
posterior/= posterior.sum(axis=0)[np.newaxis,:]
posterior = np.nan_to_num(posterior)
#posterior = softmax(ln(self.fwd_norms).sum(axis = 0)+ln(self.prior_policies))
#np.testing.assert_allclose(post, posterior)
return posterior, likelihood
def update_beliefs_context(self, tau, t, reward, posterior_states, posterior_policies, prior_context, policies, context=None):
post_policies = (prior_context[np.newaxis,:] * posterior_policies).sum(axis=1)
beta = self.dirichlet_rew_params.copy()
states = (posterior_states[:,t,:] * post_policies[np.newaxis,:,np.newaxis]).sum(axis=1)
beta_prime = self.dirichlet_rew_params.copy()
beta_prime[reward] = beta[reward] + states
# for c in range(self.nc):
# for state in range(self.nh):
# self.generative_model_rewards[:,state,c] =\
# np.exp(scs.digamma(beta_prime[:,state,c])\
# -scs.digamma(beta_prime[:,state,c].sum()))
# self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()
#
# self.rew_messages[:,t+1:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
#
# for c in range(self.nc):
# for pi, cs in enumerate(policies):
# if self.prior_policies[pi,c] > 1e-15:
# self.update_messages(t, pi, cs, c)
# else:
# self.fwd_messages[:,:,pi,c] = 1./self.nh #0
alpha = self.dirichlet_pol_params.copy()
if t == self.T-1:
chosen_pol = np.argmax(post_policies)
inf_context = np.argmax(prior_context)
alpha_prime = self.dirichlet_pol_params.copy()
alpha_prime[chosen_pol,:] += prior_context
#alpha_prime[chosen_pol,inf_context] = self.dirichlet_pol_params[chosen_pol,inf_context] + 1
else:
alpha_prime = alpha
if self.nc == 1:
posterior = np.ones(1)
else:
# todo: recalc
#outcome_surprise = ((states * prior_context[np.newaxis,:]).sum(axis=1)[:,np.newaxis] * (scs.digamma(beta_prime[reward]) - scs.digamma(beta_prime.sum(axis=0)))).sum(axis=0)
if t>0:
outcome_surprise = (posterior_policies * ln(self.fwd_norms.prod(axis=0))).sum(axis=0)
entropy = - (posterior_policies * ln(posterior_policies)).sum(axis=0)
#policy_surprise = (post_policies[:,np.newaxis] * scs.digamma(alpha_prime)).sum(axis=0) - scs.digamma(alpha_prime.sum(axis=0))
policy_surprise = (posterior_policies * scs.digamma(alpha_prime)).sum(axis=0) - scs.digamma(alpha_prime.sum(axis=0))
else:
outcome_surprise = 0
entropy = 0
policy_surprise = 0
if context is not None:
context_obs_suprise = ln(self.generative_model_context[context]+1e-10)
else:
context_obs_suprise = 0
posterior = outcome_surprise + policy_surprise + entropy + context_obs_suprise
#+ np.nan_to_num((posterior_policies * ln(self.fwd_norms).sum(axis = 0))).sum(axis=0)#\
# if tau in range(90,120) and t == 1:
# #print(tau, np.exp(outcome_surprise), np.exp(policy_surprise))
# print(tau, np.exp(outcome_surprise[1])/np.exp(outcome_surprise[0]), np.exp(policy_surprise[1])/np.exp(policy_surprise[0]))
posterior = np.nan_to_num(softmax(posterior+ln(prior_context)))
return posterior
def update_beliefs_dirichlet_pol_params(self, tau, t, posterior_policies, posterior_context = [1]):
assert(t == self.T-1)
chosen_pol = np.argmax(posterior_policies, axis=0)
# self.dirichlet_pol_params[chosen_pol,:] += posterior_context.sum(axis=0)/posterior_context.sum()
self.dirichlet_pol_params = (1-self.pol_lambda) * self.dirichlet_pol_params + 1 - (1-self.pol_lambda)
self.dirichlet_pol_params[chosen_pol,:] += posterior_context
self.prior_policies[:] = self.dirichlet_pol_params.copy() #np.exp(scs.digamma(self.dirichlet_pol_params) - scs.digamma(self.dirichlet_pol_params.sum(axis=0))[np.newaxis,:])
self.prior_policies /= self.prior_policies.sum(axis=0)[np.newaxis,:]
return self.dirichlet_pol_params, self.prior_policies
def update_beliefs_dirichlet_rew_params(self, tau, t, reward, posterior_states, posterior_policies, posterior_context = [1]):
states = (posterior_states[:,t,:,:] * posterior_policies[np.newaxis,:,:]).sum(axis=1)
old = self.dirichlet_rew_params.copy()
# c = np.argmax(posterior_context)
# self.dirichlet_rew_params[reward,:,c] += states[:,c]
self.dirichlet_rew_params[:,self.non_decaying:,:] = (1-self.r_lambda) * self.dirichlet_rew_params[:,self.non_decaying:,:] +1 - (1-self.r_lambda)
self.dirichlet_rew_params[reward,:,:] += states * posterior_context[np.newaxis,:]
for c in range(self.nc):
for state in range(self.nh):
#self.generative_model_rewards[:,state,c] = self.dirichlet_rew_params[:,state,c] / self.dirichlet_rew_params[:,state,c].sum()
self.generative_model_rewards[:,state,c] =\
np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
-scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()
self.rew_messages[:,t+1:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
# for c in range(self.nc):
# for pi, cs in enumerate(policies):
# if self.prior_policies[pi,c] > 1e-15:
# self.update_messages(t, pi, cs, c)
# else:
# self.fwd_messages[:,:,pi,c] = 1./self.nh #0
return self.dirichlet_rew_params
class TwoStepPerception(object):
    """Belief updating for a two-step task agent.

    Largely parallels ``HierarchicalPerception`` but with a 3-D (context-free)
    state transition model, hard-coded forgetting rates, and a digamma-based
    policy prior.

    NOTE(review): relies on module-level names ``np``, ``scs`` (digamma),
    ``ln`` and ``softmax`` defined elsewhere in this file.
    """

    def __init__(self,
                 generative_model_observations,
                 generative_model_states,
                 generative_model_rewards,
                 transition_matrix_context,
                 prior_states,
                 prior_rewards,
                 prior_policies,
                 dirichlet_pol_params = None,
                 dirichlet_rew_params = None,
                 T=5):
        self.generative_model_observations = generative_model_observations.copy()
        self.generative_model_states = generative_model_states.copy()
        self.generative_model_rewards = generative_model_rewards.copy()
        self.transition_matrix_context = transition_matrix_context.copy()
        self.prior_rewards = prior_rewards.copy()
        self.prior_states = prior_states.copy()
        self.prior_policies = prior_policies.copy()
        self.T = T                        # number of time steps per trial
        self.nh = prior_states.shape[0]   # number of hidden states
        # A trailing axis on the reward model marks multiple contexts;
        # otherwise add a singleton context axis.
        if len(generative_model_rewards.shape) > 2:
            self.infer_context = True
            self.nc = generative_model_rewards.shape[2]
        else:
            self.nc = 1
            self.generative_model_rewards = self.generative_model_rewards[:,:,np.newaxis]
        if dirichlet_pol_params is not None:
            self.dirichlet_pol_params = dirichlet_pol_params.copy()
        if dirichlet_rew_params is not None:
            self.dirichlet_rew_params = dirichlet_rew_params.copy()
        # Derive p(r|s,c) from the reward Dirichlet via the expected-log
        # (digamma) parameterisation, renormalised per state.
        # NOTE(review): assumes dirichlet_rew_params was provided; with the
        # default None this loop raises AttributeError — confirm callers.
        for c in range(self.nc):
            for state in range(self.nh):
                self.generative_model_rewards[:,state,c] =\
                np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
                       -scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
                self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()

    def instantiate_messages(self, policies):
        """Initialise all message arrays for a fresh trial and run the first
        backward sweep for every policy and context."""
        npi = policies.shape[0]
        self.bwd_messages = np.zeros((self.nh, self.T, npi, self.nc))
        self.bwd_messages[:,-1,:, :] = 1./self.nh   # uniform at the final step
        self.fwd_messages = np.zeros((self.nh, self.T, npi, self.nc))
        self.fwd_messages[:, 0, :, :] = self.prior_states[:, np.newaxis, np.newaxis]
        self.fwd_norms = np.zeros((self.T+1, npi, self.nc))
        self.fwd_norms[0,:,:] = 1.
        # Flat observation messages until real observations arrive.
        self.obs_messages = np.zeros((self.nh, self.T, self.nc)) + 1/self.nh
        self.rew_messages = np.zeros((self.nh, self.T, self.nc))
        for c in range(self.nc):
            # Expected reward message under the prior preference.
            self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
            # Backward sweep per policy, walking its actions in reverse order.
            for pi, cstates in enumerate(policies):
                for t, u in enumerate(np.flip(cstates, axis = 0)):
                    tp = self.T - 2 - t
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
                        self.obs_messages[:, tp+1,c]*\
                        self.rew_messages[:, tp+1,c]
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
                        .dot(self.generative_model_states[:,:,u])
                    self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()

    def update_messages(self, t, pi, cs, c=0):
        """Refresh messages for policy ``pi`` in context ``c`` around step ``t``.

        cs: the action sequence of policy pi.
        """
        if t > 0:
            # Backward sweep from step t down to 0 over already-taken actions.
            for i, u in enumerate(np.flip(cs[:t], axis = 0)):
                self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-i,pi,c]*\
                    self.obs_messages[:,t-i,c]*\
                    self.rew_messages[:, t-i,c]
                self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-1-i,pi,c]\
                    .dot(self.generative_model_states[:,:,u])
                norm = self.bwd_messages[:,t-1-i,pi,c].sum()
                if norm > 0:   # guard against an all-zero message
                    self.bwd_messages[:,t-1-i, pi,c] /= norm
        if len(cs[t:]) > 0:
            # Forward sweep from step t to the end over remaining actions.
            for i, u in enumerate(cs[t:]):
                self.fwd_messages[:, t+1+i, pi,c] = self.fwd_messages[:,t+i, pi,c]*\
                    self.obs_messages[:, t+i,c]*\
                    self.rew_messages[:, t+i,c]
                self.fwd_messages[:, t+1+i, pi,c] = \
                    self.generative_model_states[:,:,u].\
                    dot(self.fwd_messages[:, t+1+i, pi,c])
                # Record the normalisation constant; it feeds the policy likelihood.
                self.fwd_norms[t+1+i,pi,c] = self.fwd_messages[:,t+1+i,pi,c].sum()
                if self.fwd_norms[t+1+i, pi,c] > 0:
                    self.fwd_messages[:,t+1+i, pi,c] /= self.fwd_norms[t+1+i,pi,c]

    def reset_preferences(self, t, new_preference, policies):
        """Swap in a new reward preference and redo the backward sweeps for the
        remaining policies from step ``t`` on."""
        self.prior_rewards = new_preference.copy()
        for c in range(self.nc):
            self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
            for pi, cstates in enumerate(policies[t:]):
                for i, u in enumerate(np.flip(cstates, axis = 0)):
                    tp = self.T - 2 - i
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
                        self.obs_messages[:, tp+1,c]*\
                        self.rew_messages[:, tp+1,c]
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
                        .dot(self.generative_model_states[:,:,u])
                    self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()

    def update_beliefs_states(self, tau, t, observation, reward, policies, possible_policies):
        """Posterior over hidden states given the step-t observation and reward.

        Returns an (nh, T, npi, nc) array; NaNs from 0/0 (zero norms) are
        mapped to 0 by the final nan_to_num.
        """
        # Estimate expected state distribution.
        if t == 0:
            self.instantiate_messages(policies)
        self.obs_messages[:,t,:] = self.generative_model_observations[observation][:,np.newaxis]
        self.rew_messages[:,t,:] = self.generative_model_rewards[reward]
        for c in range(self.nc):
            for pi, cs in enumerate(policies):
                # Only propagate plausible policies; zero out the rest.
                if self.prior_policies[pi,c] > 1e-15 and pi in possible_policies:
                    self.update_messages(t, pi, cs, c)
                else:
                    self.fwd_messages[:,:,pi,c] = 0
        # Estimate posterior state distribution (product of all messages).
        posterior = self.fwd_messages*self.bwd_messages*self.obs_messages[:,:,np.newaxis,:]*self.rew_messages[:,:,np.newaxis,:]
        norm = posterior.sum(axis = 0)
        self.fwd_norms[-1] = norm[-1]   # final-step norm feeds the policy likelihood
        posterior /= norm
        return np.nan_to_num(posterior)

    def update_beliefs_policies(self, tau, t, gamma=4):
        """Posterior over policies from the forward norms; ``gamma`` is a
        fixed sharpening exponent on the likelihood.

        Returns (posterior, likelihood); the likelihood is unnormalised.
        """
        likelihood = self.fwd_norms.prod(axis=0)
        posterior = np.power(likelihood,gamma) * self.prior_policies
        posterior/= posterior.sum(axis=0)[np.newaxis,:]
        return posterior, likelihood

    def update_beliefs_context(self, tau, t, reward, posterior_states, posterior_policies, prior_context, policies):
        """Posterior over contexts, combining outcome and policy evidence."""
        post_policies = (prior_context[np.newaxis,:] * posterior_policies).sum(axis=1)
        beta = self.dirichlet_rew_params.copy()
        states = (posterior_states[:,t,:] * post_policies[np.newaxis,:,np.newaxis]).sum(axis=1)
        # Hypothetical reward counts after observing this reward.
        beta_prime = self.dirichlet_rew_params.copy()
        beta_prime[reward] = beta[reward] + states
        alpha = self.dirichlet_pol_params.copy()
        if t == self.T-1:
            # End of trial: hypothetically credit the chosen policy per context.
            chosen_pol = np.argmax(post_policies)
            inf_context = np.argmax(prior_context)
            alpha_prime = self.dirichlet_pol_params.copy()
            alpha_prime[chosen_pol,:] += prior_context
        else:
            alpha_prime = alpha
        if self.nc == 1:
            posterior = np.ones(1)   # single context: nothing to infer
        else:
            # Log-evidence terms: outcome likelihood, policy entropy, and
            # expected log prior over policies (digamma of the counts).
            outcome_surprise = (posterior_policies * ln(self.fwd_norms.prod(axis=0))).sum(axis=0)
            entropy = - (posterior_policies * ln(posterior_policies)).sum(axis=0)
            policy_surprise = (posterior_policies * scs.digamma(alpha_prime)).sum(axis=0) - scs.digamma(alpha_prime.sum(axis=0))
            posterior = outcome_surprise + policy_surprise + entropy
            posterior = np.nan_to_num(softmax(posterior+ln(prior_context)))
        return posterior

    def update_beliefs_dirichlet_pol_params(self, tau, t, posterior_policies, posterior_context = [1]):
        """End-of-trial update of the policy Dirichlet counts and policy prior.

        Uses a hard-coded forgetting rate (alpha = 0.3) and a digamma-based
        prior parameterisation.
        """
        assert(t == self.T-1)
        chosen_pol = np.argmax(posterior_policies, axis=0)
        alpha = 0.3   # fixed forgetting rate for policy counts
        # Decay all counts towards 1, then credit the chosen policy
        # in proportion to the context posterior.
        self.dirichlet_pol_params = (1-alpha) * self.dirichlet_pol_params + 1 - (1-alpha)
        self.dirichlet_pol_params[chosen_pol,:] += posterior_context
        # Expected-log (digamma) parameterisation of the policy prior, renormalised.
        self.prior_policies[:] = np.exp(scs.digamma(self.dirichlet_pol_params) - scs.digamma(self.dirichlet_pol_params.sum(axis=0))[np.newaxis,:])
        self.prior_policies /= self.prior_policies.sum(axis=0)
        return self.dirichlet_pol_params

    def update_beliefs_dirichlet_rew_params(self, tau, t, reward, posterior_states, posterior_policies, posterior_context = [1]):
        """Update the reward Dirichlet counts from the observed reward, then
        refresh the reward model and the future reward messages.

        Uses a hard-coded forgetting rate (alpha = 0.6) and exempts the first
        3 states from decay (hard-coded slice).
        """
        states = (posterior_states[:,t,:,:] * posterior_policies[np.newaxis,:,:]).sum(axis=1)
        state = np.argmax(states)                  # NOTE(review): shadowed by the loop below
        old = self.dirichlet_rew_params.copy()     # NOTE(review): unused afterwards
        alpha = 0.6   # fixed forgetting rate for reward counts
        # Decay counts (states 3+ only), then add the state evidence weighted
        # by the context posterior.
        self.dirichlet_rew_params[:,3:,:] = (1-alpha) * self.dirichlet_rew_params[:,3:,:] +1 - (1-alpha)
        self.dirichlet_rew_params[reward,:,:] += states * posterior_context[np.newaxis,:]
        for c in range(self.nc):
            for state in range(self.nh):
                # Expected-log parameterisation (digamma), renormalised.
                self.generative_model_rewards[:,state,c] =\
                np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
                       -scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
                self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()
            # Future reward messages use the refreshed model.
            self.rew_messages[:,t+1:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
        return self.dirichlet_rew_params
| 50.789077
| 184
| 0.58482
| 3,496
| 26,969
| 4.298913
| 0.042048
| 0.078848
| 0.075853
| 0.06747
| 0.932464
| 0.910041
| 0.896201
| 0.886686
| 0.859937
| 0.855346
| 0
| 0.013603
| 0.274945
| 26,969
| 530
| 185
| 50.884906
| 0.754986
| 0.203864
| 0
| 0.810345
| 0
| 0
| 0.000702
| 0
| 0
| 0
| 0
| 0.001887
| 0.005747
| 1
| 0.054598
| false
| 0
| 0.011494
| 0
| 0.100575
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
297e8c5d4fa93ef3ca30a8e53de4d1ce42c7db3b
| 2,760
|
py
|
Python
|
tfmunir/bi/models.py
|
cfvelez/tfmunir
|
11f5b5ba01a079743d7cc5f880768f8b9fe75376
|
[
"Apache-2.0"
] | null | null | null |
tfmunir/bi/models.py
|
cfvelez/tfmunir
|
11f5b5ba01a079743d7cc5f880768f8b9fe75376
|
[
"Apache-2.0"
] | null | null | null |
tfmunir/bi/models.py
|
cfvelez/tfmunir
|
11f5b5ba01a079743d7cc5f880768f8b9fe75376
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
# Create your models here.
class Locations(models.Model):
    # Lookup table of locations with localized (EN/ES) display names.
    code = models.CharField(primary_key=True, max_length=4)
    name_en = models.CharField(max_length=32, blank=True, null=True)  # English name
    name_es = models.CharField(max_length=32, blank=True, null=True)  # Spanish name

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'locations'
class Products(models.Model):
    # Lookup table of products with localized (EN/ES) display names.
    code = models.CharField(primary_key=True, max_length=4)
    name_en = models.CharField(max_length=100, blank=True, null=True)  # English name
    name_es = models.CharField(max_length=100, blank=True, null=True)  # Spanish name

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'products'
class Trading(models.Model):
    # Yearly trade records between a location and a partner, per product.
    # NOTE(review): no primary key declared; Django will add an implicit id.
    year = models.CharField(max_length=4, blank=True, null=True)
    product_code = models.CharField(max_length=6, blank=True, null=True)
    location_code = models.CharField(max_length=6, blank=True, null=True)
    location_partner_code = models.CharField(max_length=6, blank=True, null=True)
    export_value = models.FloatField(blank=True, null=True)
    import_value = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'trading'
class TradingSummary(models.Model):
    # Yearly aggregate of exports/imports per location (no partner breakdown).
    year = models.CharField(max_length=4, blank=True, null=True)
    location_code = models.CharField(max_length=6, blank=True, null=True)
    export_value = models.FloatField(blank=True, null=True)
    import_value = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'trading_summary'
class Unemployment(models.Model):
    # Yearly unemployment figure per location (Spanish location name denormalised).
    location_name_es = models.CharField(max_length=100, blank=True, null=True)
    location_code = models.CharField(max_length=6, blank=True, null=True)
    year = models.CharField(max_length=4, blank=True, null=True)
    value = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'unemployment'
class TradingLocationExport(models.Model):
    # Yearly export value from a location to a partner location.
    year = models.CharField(max_length=4, blank=True, null=True)
    location_code = models.CharField(max_length=6, blank=True, null=True)
    location_partner_code = models.CharField(max_length=6, blank=True, null=True)
    export_value = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'trading_location_export'
class TradingLocationImport(models.Model):
    # Yearly import value into a location from a partner location.
    year = models.CharField(max_length=4, blank=True, null=True)
    location_code = models.CharField(max_length=6, blank=True, null=True)
    location_partner_code = models.CharField(max_length=6, blank=True, null=True)
    import_value = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'trading_location_import'
| 34.936709
| 81
| 0.71558
| 372
| 2,760
| 5.150538
| 0.120968
| 0.122129
| 0.176409
| 0.230689
| 0.872129
| 0.872129
| 0.872129
| 0.872129
| 0.872129
| 0.868476
| 0
| 0.012747
| 0.175725
| 2,760
| 79
| 82
| 34.936709
| 0.829451
| 0.008696
| 0
| 0.614035
| 0
| 0
| 0.035466
| 0.016819
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.842105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
46516e6a96817977429d608e0a37ddacb64f1c19
| 230
|
py
|
Python
|
CodeWars/Python/8 kyu/Find the Integral/main.py
|
opastushkov/codewars-solutions
|
0132a24259a4e87f926048318332dcb4d94858ca
|
[
"MIT"
] | null | null | null |
CodeWars/Python/8 kyu/Find the Integral/main.py
|
opastushkov/codewars-solutions
|
0132a24259a4e87f926048318332dcb4d94858ca
|
[
"MIT"
] | null | null | null |
CodeWars/Python/8 kyu/Find the Integral/main.py
|
opastushkov/codewars-solutions
|
0132a24259a4e87f926048318332dcb4d94858ca
|
[
"MIT"
] | null | null | null |
def integrate(coefficient, exponent):
    """Integrate a monomial ``coefficient * x**exponent`` and format it.

    Returns the string 'Cx^E' where E = exponent + 1 and
    C = coefficient / (exponent + 1). C is printed without a decimal part
    when it is a whole number (e.g. '2x^3', not '2.0x^3').

    Raises ZeroDivisionError for exponent == -1, same as the original
    (the antiderivative would be logarithmic, which this format can't express).
    """
    new_exponent = exponent + 1
    new_coefficient = coefficient / new_exponent
    # Whole-number coefficients are rendered as ints instead of the original's
    # fragile "format then strip '.0'" string hack.
    if new_coefficient % 1 == 0:
        new_coefficient = int(new_coefficient)
    return '{0}x^{1}'.format(new_coefficient, new_exponent)
| 115
| 192
| 0.656522
| 32
| 230
| 4.71875
| 0.40625
| 0.298013
| 0.397351
| 0.119205
| 0.503311
| 0.503311
| 0.503311
| 0.503311
| 0.503311
| 0
| 0
| 0.054726
| 0.126087
| 230
| 2
| 192
| 115
| 0.696517
| 0
| 0
| 0
| 0
| 0
| 0.077922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
467f74f66510a5c370b13a19ed18206a6198c5bd
| 23,172
|
py
|
Python
|
utils/train_utils.py
|
HuchieWuchie/AffordanceNet
|
8827ce3cfc8dcd6a7909641099b4807568aa698c
|
[
"MIT"
] | 9
|
2021-11-02T11:18:52.000Z
|
2022-02-15T12:58:03.000Z
|
utils/train_utils.py
|
HuchieWuchie/AffordanceNet
|
8827ce3cfc8dcd6a7909641099b4807568aa698c
|
[
"MIT"
] | 1
|
2021-12-01T02:50:16.000Z
|
2021-12-02T13:50:05.000Z
|
utils/train_utils.py
|
HuchieWuchie/AffordanceNet
|
8827ce3cfc8dcd6a7909641099b4807568aa698c
|
[
"MIT"
] | 2
|
2021-11-04T10:09:40.000Z
|
2022-02-06T17:19:48.000Z
|
import tensorflow as tf
import math
from utils import bbox_utils
from tensorflow import keras
import tensorflow.keras.losses as KLoss
def get_step_size(total_items, batch_size):
    """Return how many complete batches fit into the dataset.

    inputs:
        total_items = total number of items in the dataset
        batch_size = number of items consumed per step

    outputs:
        step_size = number of full steps per epoch (any partial batch is dropped)
    """
    items_per_step = total_items / batch_size
    return math.floor(items_per_step)
def randomly_select_xyz_mask(mask, select_xyz):
    """Keep, per batch row, a random subset of the True entries in ``mask``.

    Selects x, y, z number of True elements for the corresponding batch row
    and flips the remaining True entries to False.

    inputs:
        mask = (batch_size, [m_bool_value])
        select_xyz = ([x_y_z_number_for_corresponding_batch])
            example = tf.constant([128, 50, 42], dtype=tf.int32)

    outputs:
        selected_valid_mask = (batch_size, [m_bool_value])
    """
    # Random integer scores; maxval comfortably above the largest request.
    upper = tf.reduce_max(select_xyz) * 10
    noise = tf.random.uniform(tf.shape(mask), minval=1, maxval=upper, dtype=tf.int32)
    # False entries score 0, so they always rank behind every True entry.
    scored = tf.cast(mask, tf.int32) * noise
    order = tf.argsort(scored, direction="DESCENDING")
    ranks = tf.argsort(order)
    # Keep only the top-k ranked entries per row (k taken from select_xyz).
    keep = tf.less(ranks, tf.expand_dims(select_xyz, 1))
    return tf.logical_and(mask, keep)
def iit_generator(dataset, anchors, cfg):
    """Tensorflow data generator for fit method, yielding inputs and outputs.

    inputs:
        dataset = tf.data.Dataset, PaddedBatchDataset
        anchors = (total_anchors, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        cfg = configuration object; cfg.MASK_REG toggles whether mask targets
            are included in each yielded input tuple

    outputs:
        yield inputs, outputs
    """
    # Loop forever so keras fit can draw as many epochs as it needs.
    while True:
        for image_data in dataset:
            if cfg.MASK_REG:
                img, gt_boxes, gt_labels, gt_mask, gt_seg_mask_inds = image_data
            else:
                img, gt_boxes, gt_labels = image_data
            bbox_deltas, bbox_labels = calculate_rpn_actual_outputs(anchors, gt_boxes, gt_labels, cfg)
            # The zero constants are placeholder targets, one per loss output
            # (5 with mask regression, 4 without) — presumably the model adds
            # its real losses internally; confirm against the training script.
            if cfg.MASK_REG:
                yield (img, gt_boxes, gt_labels, bbox_deltas, bbox_labels, gt_mask, gt_seg_mask_inds), \
                    (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32))
            else:
                yield (img, gt_boxes, gt_labels, bbox_deltas, bbox_labels), \
                    (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32))
def iit_generator_inference_no_resize(dataset, cfg):
    """Tensorflow data generator for inference, yielding input tuples only
    (no dummy loss targets and no RPN target computation).

    inputs:
        dataset = tf.data.Dataset, PaddedBatchDataset
        cfg = configuration object; cfg.MASK_REG toggles whether mask targets
            are included in each yielded tuple

    outputs:
        yield inputs
    """
    while True:
        for image_data in dataset:
            if cfg.MASK_REG:
                img, img_shape, gt_boxes, gt_labels, gt_mask, gt_seg_mask_inds = image_data
            else:
                img, img_shape, gt_boxes, gt_labels = image_data
            # NOTE(review): the img_shape unpacked from the dataset is
            # immediately overwritten with the actual (batched) tensor shape.
            img_shape = tf.constant([[img.shape[1], img.shape[2]]])
            if cfg.MASK_REG:
                yield (img, img_shape, gt_boxes, gt_labels, gt_mask, gt_seg_mask_inds)
            else:
                yield (img, img_shape, gt_boxes, gt_labels)
def iit_generator_no_resize(dataset, cfg, base_anchors):
    """Tensorflow data generator for fit method on variable-size images:
    anchors and RPN targets are recomputed per image from its actual shape.

    inputs:
        dataset = tf.data.Dataset, PaddedBatchDataset
        cfg = configuration object; cfg.MASK_REG toggles mask targets
        base_anchors = base anchor boxes tiled over each feature map

    outputs:
        yield inputs, outputs
    """
    while True:
        for image_data in dataset:
            if cfg.MASK_REG:
                img, img_shape, gt_boxes, gt_labels, gt_mask, gt_seg_mask_inds = image_data
            else:
                img, img_shape, gt_boxes, gt_labels = image_data
            # NOTE(review): the unpacked img_shape is overwritten with the
            # actual tensor shape.
            img_shape = tf.constant([[img.shape[1], img.shape[2]]])
            # Backbone stride of 16 determines the feature map resolution.
            feature_map_shape = (tf.cast(tf.floor(img.shape[1]/16), tf.int32), tf.cast(tf.floor(img.shape[2]/16), tf.int32))
            anchors = bbox_utils.generate_anchors(feature_map_shape, base_anchors)
            bbox_deltas, bbox_labels = calculate_rpn_actual_outputs_no_resize(anchors, gt_boxes, gt_labels, cfg, feature_map_shape)
            # Zero constants are placeholder targets, one per loss output
            # (5 with mask regression, 4 without).
            if cfg.MASK_REG:
                yield (img, img_shape, gt_boxes, gt_labels, bbox_deltas, bbox_labels, gt_mask, gt_seg_mask_inds), \
                    (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32))
            else:
                yield (img, img_shape, gt_boxes, gt_labels, bbox_deltas, bbox_labels), \
                    (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32))
def iit_generator_no_resize_extra_loss(dataset, cfg, base_anchors):
    """Variant of ``iit_generator_no_resize`` whose MASK_REG branch yields six
    dummy loss targets instead of five (one extra loss output).

    inputs:
        dataset = tf.data.Dataset, PaddedBatchDataset
        cfg = configuration object; cfg.MASK_REG toggles mask targets
        base_anchors = base anchor boxes tiled over each feature map

    outputs:
        yield inputs, outputs
    """
    while True:
        for image_data in dataset:
            if cfg.MASK_REG:
                img, img_shape, gt_boxes, gt_labels, gt_mask, gt_seg_mask_inds = image_data
            else:
                img, img_shape, gt_boxes, gt_labels = image_data
            # NOTE(review): the unpacked img_shape is overwritten with the
            # actual tensor shape.
            img_shape = tf.constant([[img.shape[1], img.shape[2]]])
            # Backbone stride of 16 determines the feature map resolution.
            feature_map_shape = (tf.cast(tf.floor(img.shape[1]/16), tf.int32), tf.cast(tf.floor(img.shape[2]/16), tf.int32))
            anchors = bbox_utils.generate_anchors(feature_map_shape, base_anchors)
            bbox_deltas, bbox_labels = calculate_rpn_actual_outputs_no_resize(anchors, gt_boxes, gt_labels, cfg, feature_map_shape)
            # Six dummy targets here (vs five in iit_generator_no_resize).
            if cfg.MASK_REG:
                yield (img, img_shape, gt_boxes, gt_labels, bbox_deltas, bbox_labels, gt_mask, gt_seg_mask_inds), \
                    (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32))
            else:
                yield (img, img_shape, gt_boxes, gt_labels, bbox_deltas, bbox_labels), \
                    (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                     tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32))
def faster_rcnn_generator(dataset, anchors, cfg):
    """Endless generator feeding Faster R-CNN training via keras fit.

    inputs:
        dataset = tf.data.Dataset, PaddedBatchDataset
        anchors = (total_anchors, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        cfg = training configuration

    outputs:
        yield inputs, outputs
    """
    while True:
        for img, gt_boxes, gt_labels in dataset:
            bbox_deltas, bbox_labels = calculate_rpn_actual_outputs(anchors, gt_boxes, gt_labels, cfg)
            # Four placeholder zero targets, one per model loss output.
            dummy_targets = (tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32),
                             tf.constant(0., dtype=tf.float32), tf.constant(0., dtype=tf.float32))
            yield (img, gt_boxes, gt_labels, bbox_deltas, bbox_labels), dummy_targets
def rpn_generator(dataset, anchors, hyper_params):
    """Endless generator producing (image, (deltas, labels)) pairs for RPN training.

    inputs:
        dataset = tf.data.Dataset, PaddedBatchDataset
        anchors = (total_anchors, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        hyper_params = dictionary

    outputs:
        yield inputs, outputs
    """
    while True:
        for img, gt_boxes, gt_labels in dataset:
            deltas, labels = calculate_rpn_actual_outputs(anchors, gt_boxes, gt_labels, hyper_params)
            yield img, (deltas, labels)
def calculate_rpn_actual_outputs(anchors, gt_boxes, gt_labels, cfg):
    """Generate RPN regression deltas and classification labels for one batch.
    Batch operations supported.
    inputs:
        anchors = (total_anchors, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        gt_boxes (batch_size, gt_box_size, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        gt_labels (batch_size, gt_box_size)
            -1 marks padding entries
        cfg = configuration object (FEATURE_MAP_SHAPE, ANCHOR_COUNT, RPN_* fields, VARIANCES)
    outputs:
        bbox_deltas = (batch_size, total_anchors, [delta_y, delta_x, delta_h, delta_w])
        bbox_labels = (batch_size, feature_map_shape, feature_map_shape, anchor_count)
            1.0 = positive anchor, 0.0 = negative anchor, -1.0 = ignored
    """
    batch_size = tf.shape(gt_boxes)[0]
    feature_map_shape = cfg.FEATURE_MAP_SHAPE
    anchor_count = cfg.ANCHOR_COUNT
    # RPN mini-batch composition: positives capped at RPN_FG_FRACTION of RPN_BATCHSIZE,
    # the remainder of the quota is filled with negatives below.
    total_pos_bboxes = int(cfg.RPN_BATCHSIZE * cfg.RPN_FG_FRACTION)
    total_neg_bboxes = cfg.RPN_BATCHSIZE - total_pos_bboxes
    variances = cfg.VARIANCES
    # Calculate iou values between each bboxes and ground truth boxes
    iou_map, _ = bbox_utils.generate_iou_map(anchors, gt_boxes)
    # Best ground-truth box index for every anchor
    max_indices_each_row = tf.argmax(iou_map, axis=2, output_type=tf.int32)
    # Best anchor index for every ground-truth box
    max_indices_each_column = tf.argmax(iou_map, axis=1, output_type=tf.int32)
    # IoU map has iou values for every gt boxes and we merge these values column wise
    merged_iou_map = tf.reduce_max(iou_map, axis=2)
    # Anchors whose best IoU exceeds the positive threshold
    pos_mask = tf.greater(merged_iou_map, cfg.RPN_POSITIVE_OVERLAP)
    # Only real (non-padding) gt boxes may force an anchor positive
    valid_indices_cond = tf.not_equal(gt_labels, -1)
    valid_indices = tf.cast(tf.where(valid_indices_cond), tf.int32)
    valid_max_indices = max_indices_each_column[valid_indices_cond]
    # Every real gt box claims its best-matching anchor as positive,
    # even when that anchor's IoU is below the threshold.
    scatter_bbox_indices = tf.stack([valid_indices[..., 0], valid_max_indices], 1)
    max_pos_mask = tf.scatter_nd(scatter_bbox_indices, tf.fill((tf.shape(valid_indices)[0], ), True), tf.shape(pos_mask))
    pos_mask = tf.logical_or(pos_mask, max_pos_mask)
    # Randomly subsample positives down to the allowed quota
    pos_mask = randomly_select_xyz_mask(pos_mask, tf.constant([total_pos_bboxes], dtype=tf.int32))
    #
    pos_count = tf.reduce_sum(tf.cast(pos_mask, tf.int32), axis=-1)
    neg_count = (total_pos_bboxes + total_neg_bboxes) - pos_count
    # Negatives: low-IoU anchors that were not selected as positives
    neg_mask = tf.logical_and(tf.less(merged_iou_map, cfg.RPN_NEGATIVE_OVERLAP), tf.logical_not(pos_mask))
    neg_mask = randomly_select_xyz_mask(neg_mask, neg_count)
    # Label arithmetic: positive -> 1 (+0), negative -> 0 (-1 + 1), ignored -> -1 (+0)
    pos_labels = tf.where(pos_mask, tf.ones_like(pos_mask, dtype=tf.float32), tf.constant(-1.0, dtype=tf.float32))
    neg_labels = tf.cast(neg_mask, dtype=tf.float32)
    bbox_labels = tf.add(pos_labels, neg_labels)
    # For every anchor pick the coordinates of its best-matching gt box
    gt_boxes_map = tf.gather(gt_boxes, max_indices_each_row, batch_dims=1)
    # Replace negative bboxes with zeros
    expanded_gt_boxes = tf.where(tf.expand_dims(pos_mask, -1), gt_boxes_map, tf.zeros_like(gt_boxes_map))
    # Calculate delta values between anchors and ground truth bboxes
    bbox_deltas = bbox_utils.get_deltas_from_bboxes(anchors, expanded_gt_boxes) / variances
    #
    # bbox_deltas = tf.reshape(bbox_deltas, (batch_size, feature_map_shape, feature_map_shape, anchor_count * 4))
    bbox_labels = tf.reshape(bbox_labels, (batch_size, feature_map_shape, feature_map_shape, anchor_count))
    #
    return bbox_deltas, bbox_labels
def calculate_rpn_actual_outputs_no_resize(anchors, gt_boxes, gt_labels, cfg, feature_map_shape):
    """Generate RPN targets like `calculate_rpn_actual_outputs`, but for a
    per-image (non-square) feature map shape passed in explicitly.
    Batch operations supported.
    inputs:
        anchors = (total_anchors, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        gt_boxes (batch_size, gt_box_size, [y1, x1, y2, x2])
            these values in normalized format between [0, 1]
        gt_labels (batch_size, gt_box_size)
            -1 marks padding entries
        cfg = configuration object (ANCHOR_COUNT, RPN_* fields, VARIANCES)
        feature_map_shape = (height, width) of the feature map
    outputs:
        bbox_deltas = (batch_size, total_anchors, [delta_y, delta_x, delta_h, delta_w])
        bbox_labels = (batch_size, feature_map_shape[0], feature_map_shape[1], anchor_count)
            1.0 = positive anchor, 0.0 = negative anchor, -1.0 = ignored
    """
    batch_size = tf.shape(gt_boxes)[0]
    anchor_count = cfg.ANCHOR_COUNT
    # RPN mini-batch composition: positives capped at RPN_FG_FRACTION of RPN_BATCHSIZE.
    total_pos_bboxes = int(cfg.RPN_BATCHSIZE * cfg.RPN_FG_FRACTION)
    total_neg_bboxes = cfg.RPN_BATCHSIZE - total_pos_bboxes
    variances = cfg.VARIANCES
    # Calculate iou values between each bboxes and ground truth boxes
    iou_map, _ = bbox_utils.generate_iou_map(anchors, gt_boxes)
    # Best ground-truth box index for every anchor
    max_indices_each_row = tf.argmax(iou_map, axis=2, output_type=tf.int32)
    # Best anchor index for every ground-truth box
    max_indices_each_column = tf.argmax(iou_map, axis=1, output_type=tf.int32)
    # IoU map has iou values for every gt boxes and we merge these values column wise
    merged_iou_map = tf.reduce_max(iou_map, axis=2)
    # Anchors whose best IoU exceeds the positive threshold
    pos_mask = tf.greater(merged_iou_map, cfg.RPN_POSITIVE_OVERLAP)
    # Only real (non-padding) gt boxes may force an anchor positive
    valid_indices_cond = tf.not_equal(gt_labels, -1)
    valid_indices = tf.cast(tf.where(valid_indices_cond), tf.int32)
    valid_max_indices = max_indices_each_column[valid_indices_cond]
    # Every real gt box claims its best-matching anchor as positive,
    # even when that anchor's IoU is below the threshold.
    scatter_bbox_indices = tf.stack([valid_indices[..., 0], valid_max_indices], 1)
    max_pos_mask = tf.scatter_nd(scatter_bbox_indices, tf.fill((tf.shape(valid_indices)[0], ), True), tf.shape(pos_mask))
    pos_mask = tf.logical_or(pos_mask, max_pos_mask)
    # Randomly subsample positives down to the allowed quota
    pos_mask = randomly_select_xyz_mask(pos_mask, tf.constant([total_pos_bboxes], dtype=tf.int32))
    #
    pos_count = tf.reduce_sum(tf.cast(pos_mask, tf.int32), axis=-1)
    neg_count = (total_pos_bboxes + total_neg_bboxes) - pos_count
    # Negatives: low-IoU anchors that were not selected as positives
    neg_mask = tf.logical_and(tf.less(merged_iou_map, cfg.RPN_NEGATIVE_OVERLAP), tf.logical_not(pos_mask))
    neg_mask = randomly_select_xyz_mask(neg_mask, neg_count)
    # Label arithmetic: positive -> 1 (+0), negative -> 0 (-1 + 1), ignored -> -1 (+0)
    pos_labels = tf.where(pos_mask, tf.ones_like(pos_mask, dtype=tf.float32), tf.constant(-1.0, dtype=tf.float32))
    neg_labels = tf.cast(neg_mask, dtype=tf.float32)
    bbox_labels = tf.add(pos_labels, neg_labels)
    # For every anchor pick the coordinates of its best-matching gt box
    gt_boxes_map = tf.gather(gt_boxes, max_indices_each_row, batch_dims=1)
    # Replace negative bboxes with zeros
    expanded_gt_boxes = tf.where(tf.expand_dims(pos_mask, -1), gt_boxes_map, tf.zeros_like(gt_boxes_map))
    # Calculate delta values between anchors and ground truth bboxes
    bbox_deltas = bbox_utils.get_deltas_from_bboxes(anchors, expanded_gt_boxes) / variances
    #
    # bbox_deltas = tf.reshape(bbox_deltas, (batch_size, feature_map_shape, feature_map_shape, anchor_count * 4))
    bbox_labels = tf.reshape(bbox_labels, (batch_size, feature_map_shape[0], feature_map_shape[1], anchor_count))
    #
    return bbox_deltas, bbox_labels
def rpn_cls_loss(*args):
    """Binary cross-entropy over RPN anchor labels.
    Actual labels are 0 or 1; anchors labelled -1 are "ignored" and
    excluded from the loss.
    inputs:
        *args = could be (y_true, y_pred) or ((y_true, y_pred), )
    outputs:
        loss = BinaryCrossentropy value
    """
    if len(args) == 2:
        y_true, y_pred = args
    else:
        y_true, y_pred = args[0]
    # Keep only positions whose label is not the ignore marker (-1)
    valid = tf.where(tf.not_equal(y_true, tf.constant(-1.0, dtype=tf.float32)))
    bce = keras.losses.BinaryCrossentropy()
    return bce(tf.gather_nd(y_true, valid), tf.gather_nd(y_pred, valid))
def rpn_reg_loss(*args):
    """Smooth-L1 regression loss for the RPN.
    Only anchors with a positive label contribute to the loss.
    inputs:
        *args = could be (y_true, y_pred, target_labels) or ((y_true, y_pred, target_labels), )
        y_true (batch_size, total_anchors, deltas) = (2, 8649, 4)
        y_pred (batch_size, fm, fm, deltas*anchor_count) = (2, 31, 31, 36)
        target_labels (batch_size, fm, fm, anchor_count) = (2, 31, 31, 9)
    outputs:
        loss = smooth L1 loss
    """
    if len(args) == 3:
        y_true, y_pred, target_labels = args
    else:
        y_true, y_pred, target_labels = args[0]
    # Flatten labels to (batch_size, num_rois) and preds to (batch_size, num_rois, 4)
    target_labels = tf.reshape(target_labels, (tf.shape(target_labels)[0], -1))
    y_pred = tf.reshape(y_pred, (tf.shape(y_pred)[0], tf.shape(y_true)[1], 4))
    # Merge batch and roi dimensions for simplicity
    y_pred = tf.reshape(y_pred, (-1, 4))
    y_true = tf.reshape(y_true, (-1, 4))
    target_labels = tf.reshape(target_labels, (-1,))
    # Only positive ROIs contribute to the loss
    pos_ix = tf.where(target_labels > 0)[:, 0]
    y_true = tf.gather(y_true, pos_ix)
    y_pred = tf.gather(y_pred, pos_ix)
    # Guard against the no-positive-ROI case
    loss = keras.backend.switch(tf.size(y_true) > tf.constant(0),
                                smooth_l1_loss(y_true=y_true, y_pred=y_pred),
                                tf.constant(0.0))
    return keras.backend.mean(loss)
def smooth_l1_loss(y_true, y_pred):
    """Implements Smooth-L1 loss.
    y_true and y_pred are typically: [N, 4], but could be any shape.
    Quadratic for |diff| < 1, linear (|diff| - 0.5) at and beyond 1.
    """
    diff = tf.abs(y_pred - y_true)
    # tf.where selects elementwise between the quadratic and linear regimes
    return tf.where(diff < 1.0, 0.5 * diff ** 2, tf.abs(diff - 0.5))
def cls_loss(*args):
    """Categorical cross-entropy for the Faster R-CNN classifier head.
    inputs:
        *args = could be (y_true, y_pred) or ((y_true, y_pred), )
        y_true (batch_size, num_rois)
        y_pred (batch_size, num_rois, 2)
    outputs:
        loss = CategoricalCrossentropy value
    """
    if len(args) == 2:
        y_true, y_pred = args
    else:
        y_true, y_pred = args[0]
    return keras.losses.CategoricalCrossentropy()(y_true, y_pred)
def reg_loss(*args):
    """Calculating rpn / faster rcnn regression loss value.
    Reg value should be different than zero for actual values.
    Because of this we only take into account non zero values.
    inputs:
        *args = could be (y_true, y_pred, target_labels) or ((y_true, y_pred, target_labels), )
        y_true (batch_size, num_rois, deltas) = (2, 1500, 4)
        y_pred (batch_size, num_rois, deltas*num_classes) = (2, 1500, 84)
        target_labels (batch_size, num_rois, num_classes) = (2, 1500, 21)
            one-hot class labels per ROI
    outputs:
        loss = smooth L1 loss
    """
    y_true, y_pred, target_labels = args if len(args) == 3 else args[0]
    # Reshape to (batch_size, num_rois, num_classes, 4)
    y_pred = tf.reshape(y_pred, (tf.shape(y_pred)[0], tf.shape(y_true)[1], -1, 4))
    # Get target labels -> decode from one hot vector
    target_labels = tf.argmax(target_labels, axis=2)  # (batch_size, num_rois)
    # Reshape to merge batch and roi dimensions for simplicity.
    y_pred = tf.reshape(y_pred, (-1, tf.shape(y_pred)[2], 4))
    y_true = tf.reshape(y_true, (-1, 4))
    target_labels = tf.reshape(target_labels, (-1,))
    # Only positive ROIs contribute to the loss. And only
    # the right class_id of each ROI. Get their indices.
    positive_roi_ix = tf.where(target_labels > 0)[:, 0]
    positive_roi_class_ids = tf.cast(
        tf.gather(target_labels, positive_roi_ix), tf.int64)
    # (roi index, class id) pairs select the per-class delta predictions
    indices = tf.stack([positive_roi_ix, positive_roi_class_ids], axis=1)
    # Gather the deltas (predicted and true) that contribute to loss
    y_true = tf.gather(y_true, positive_roi_ix)
    y_pred = tf.gather_nd(y_pred, indices)
    # Guard against the no-positive-ROI case (empty gather)
    loss = keras.backend.switch(tf.size(y_true) > tf.constant(0),
                                smooth_l1_loss(y_true=y_true, y_pred=y_pred),
                                tf.constant(0.0))
    loss = keras.backend.mean(loss)
    return loss
def affordance_mask_loss(*args):
    """Sparse categorical cross-entropy over AffordanceNet mask predictions.
    inputs:
        *args = could be (target_mask, pred_mask_prob) or ((target_mask, pred_mask_prob), )
        target_mask (batch_size, num_positive_rois, 244, 244)
            NOTE(review): 244 looks like a typo for 224 given pred below — confirm
        pred_mask_prob (batch_size, num_positive_rois, 224, 224, 11)
    outputs:
        loss = SparseCategoricalCrossentropy value
    """
    target_mask, pred_mask_prob = args if len(args) == 2 else args[0]
    # Flatten spatial dims: targets -> (batch, rois, H*W), preds -> (batch, rois, H*W, classes)
    target_mask = tf.reshape(target_mask, [tf.shape(target_mask)[0], tf.shape(pred_mask_prob)[1], -1])
    pred_mask_prob = tf.reshape(pred_mask_prob, [tf.shape(target_mask)[0], tf.shape(pred_mask_prob)[1], -1,
                                                 tf.shape(pred_mask_prob)[4]])
    loss = KLoss.SparseCategoricalCrossentropy()
    return loss(target_mask, pred_mask_prob)
def affordance_context_attr_loss(*args):
    """Binary cross-entropy over attribute class predictions.
    Attribute actual class value should be between 0 and 9 but in one hot representation.
    inputs:
        *args = could be (y_true, y_pred) or ((y_true, y_pred), )
    outputs:
        loss = CrossEntropy value
    """
    if len(args) == 2:
        y_true, y_pred = args
    else:
        y_true, y_pred = args[0]
    return keras.losses.BinaryCrossentropy()(y_true, y_pred)
def affordance_context_reg_loss(*args):
    """Smooth-L1 regression loss for the affordance-context head.
    Only positive ROIs contribute, and for each positive ROI only the deltas
    predicted for its own class are penalized (mirrors `reg_loss`).
    inputs:
        *args = could be (y_true, y_pred, target_labels) or ((y_true, y_pred, target_labels), )
        y_true (batch_size, num_rois, deltas) = (2, 1500, 4)
        y_pred (batch_size, num_rois, deltas*num_classes) = (2, 1500, 84)
        target_labels (batch_size, num_rois, num_classes) = (2, 1500, 21)
            one-hot class labels per ROI
    outputs:
        loss = smooth L1 loss
    """
    y_true, y_pred, target_labels = args if len(args) == 3 else args[0]
    # Reshape to (batch_size, num_rois, num_classes, 4)
    y_pred = tf.reshape(y_pred, (tf.shape(y_pred)[0], tf.shape(y_true)[1], -1, 4))
    # BUG FIX: decode one-hot target_labels to class ids before flattening
    # (as done in `reg_loss`). Without this, the (-1,) reshape below produced
    # num_rois*num_classes entries, so the `> 0` indices no longer lined up
    # with the flattened ROI axis and the gathered "class id" was always 1
    # (the one-hot value itself).
    target_labels = tf.argmax(target_labels, axis=2)  # (batch_size, num_rois)
    # Reshape to merge batch and roi dimensions for simplicity.
    y_pred = tf.reshape(y_pred, (-1, tf.shape(y_pred)[2], 4))
    y_true = tf.reshape(y_true, (-1, 4))
    target_labels = tf.reshape(target_labels, (-1,))
    # Only positive ROIs contribute to the loss. And only
    # the right class_id of each ROI. Get their indices.
    positive_roi_ix = tf.where(target_labels > 0)[:, 0]
    positive_roi_class_ids = tf.cast(
        tf.gather(target_labels, positive_roi_ix), tf.int64)
    indices = tf.stack([positive_roi_ix, positive_roi_class_ids], axis=1)
    # Gather the deltas (predicted and true) that contribute to loss
    y_true = tf.gather(y_true, positive_roi_ix)
    y_pred = tf.gather_nd(y_pred, indices)
    # Guard against the no-positive-ROI case
    loss = keras.backend.switch(tf.size(y_true) > tf.constant(0),
                                smooth_l1_loss(y_true=y_true, y_pred=y_pred),
                                tf.constant(0.0))
    loss = keras.backend.mean(loss)
    return loss
| 45.081712
| 212
| 0.663128
| 3,395
| 23,172
| 4.26215
| 0.080118
| 0.019696
| 0.037733
| 0.036282
| 0.860539
| 0.841949
| 0.827643
| 0.821424
| 0.816655
| 0.806151
| 0
| 0.024715
| 0.231702
| 23,172
| 513
| 213
| 45.169591
| 0.788069
| 0.327896
| 0
| 0.739316
| 0
| 0
| 0.001155
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.021368
| 0
| 0.149573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46ae55f313d74e8b94ecc68a5b06132b5f5dd791
| 66
|
py
|
Python
|
mct_homography/src/mct_homography/__init__.py
|
iorodeo/mct
|
fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11
|
[
"Apache-2.0"
] | null | null | null |
mct_homography/src/mct_homography/__init__.py
|
iorodeo/mct
|
fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11
|
[
"Apache-2.0"
] | null | null | null |
mct_homography/src/mct_homography/__init__.py
|
iorodeo/mct
|
fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11
|
[
"Apache-2.0"
] | null | null | null |
import homography_calibrator_master
import homography_calibrator
| 16.5
| 35
| 0.924242
| 7
| 66
| 8.285714
| 0.571429
| 0.551724
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075758
| 66
| 3
| 36
| 22
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d3caab3a891de423e0b915736f00211619ff6e0b
| 77,502
|
py
|
Python
|
build/lib/TTE.py
|
Elon-Lau/TTEkits
|
9fc356a6543735a4ead9a3735538bdba93c14db0
|
[
"MIT"
] | null | null | null |
build/lib/TTE.py
|
Elon-Lau/TTEkits
|
9fc356a6543735a4ead9a3735538bdba93c14db0
|
[
"MIT"
] | null | null | null |
build/lib/TTE.py
|
Elon-Lau/TTEkits
|
9fc356a6543735a4ead9a3735538bdba93c14db0
|
[
"MIT"
] | null | null | null |
import math
import os
import time
from datetime import datetime
from math import inf
from heapq import heappop, heappush
import collections
import functools
from collections import defaultdict
import heapq
import random
import networkx as nx
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import gurobipy as gp
from gurobipy import *
from shapely.geometry import Point,LineString
import geopandas as gpd
import osmnx as ox
class World:
"""
一个类
"""
    Observation = collections.namedtuple('Observation', 'traveltime origin destination')  # one observed trip sample
    def __init__(self, type=0, num=100, sigma=0, reg=0, time_limit=0.6):
        """
        type: which graph to use — 0 = small, 1 = normal, anything else = Manhattan
        num: number of simulated observations to generate
        sigma: std-dev of the log-normal travel-time noise
        reg: regularization hyper parameter
        time_limit: solver time-limit hyper parameter
        """
        self.type = type
        self.num = num
        self.sigma = sigma
        self.reg = reg
        self.time_limit = time_limit
def True_Graph(self):
"""
如果type=0时,加载small_model的真实图。如果type=1时,加载normal_model的真实图。如果其他情况,加载manhattan的真实图。
:return: 返回一个加载好的的图G对象
"""
if self.type == 0:
# <载入文件模块>
df_nodelist = pd.read_csv("../train_dataset/smallnodelist.csv")
df_edgelist = pd.read_csv("../train_dataset/smalledgelist.csv")
# 创建多重有向图,add_edge(1,2), add_edge(2,1)
T = nx.MultiDiGraph() # 初始化图并载入点和边模块
T.add_nodes_from(df_nodelist['node']) # 添加点auto
T.add_edges_from(zip(df_edgelist['node1'], df_edgelist['node2'])) # 添加边auto
# <设置人工网络arcTime和distance模块>
for u, v, d in T.edges(data=True):
T.edges[u, v, 0]['distance'] = 1
for u, v, d in T.edges(data=True): # 设置outside的行程时间
T.edges[u, v, 0]['arcTime'] = 1
T.edges[7, 8, 0]['arcTime'] = 4
T.edges[8, 7, 0]['arcTime'] = 4
T.edges[8, 9, 0]['arcTime'] = 4
T.edges[9, 8, 0]['arcTime'] = 4
T.edges[12, 13, 0]['arcTime'] = 4
T.edges[13, 12, 0]['arcTime'] = 4
T.edges[13, 14, 0]['arcTime'] = 4
T.edges[14, 13, 0]['arcTime'] = 4
T.edges[17, 18, 0]['arcTime'] = 4
T.edges[18, 17, 0]['arcTime'] = 4
T.edges[18, 19, 0]['arcTime'] = 4
T.edges[19, 18, 0]['arcTime'] = 4
T.edges[7, 12, 0]['arcTime'] = 4
T.edges[12, 7, 0]['arcTime'] = 4
T.edges[12, 17, 0]['arcTime'] = 4
T.edges[17, 12, 0]['arcTime'] = 4
T.edges[8, 13, 0]['arcTime'] = 4
T.edges[13, 8, 0]['arcTime'] = 4
T.edges[13, 18, 0]['arcTime'] = 4
T.edges[18, 13, 0]['arcTime'] = 4
T.edges[9, 14, 0]['arcTime'] = 4
T.edges[14, 9, 0]['arcTime'] = 4
T.edges[14, 19, 0]['arcTime'] = 4
T.edges[19, 14, 0]['arcTime'] = 4
return T
elif self.type == 1:
# <载入文件模块>
df_nodelist = pd.read_csv('../train_dataset/normalnodelist.csv')
df_edgelist = pd.read_csv('../train_dataset/normaledgelist.csv')
# 创建多重有向图,add_edge(1,2), add_edge(2,1)
T = nx.MultiDiGraph() # 初始化图并载入点和边模块
T.add_nodes_from(df_nodelist['node']) # 添加点auto
T.add_edges_from(zip(df_edgelist['node1'], df_edgelist['node2'])) # 添加边auto
# <设置人工网络arcTime和distance模块>
for u, v, d in T.edges(data=True):
T.edges[u, v, 0]['distance'] = 1
for u, v, d in T.edges(data=True): # 设置outside的行程时间
T.edges[u, v, 0]['arcTime'] = 1
T.edges[31, 32, 0]['arcTime'] = 4 # 设置upper-left的行程时间
T.edges[32, 31, 0]['arcTime'] = 4
T.edges[31, 51, 0]['arcTime'] = 4 # 设置第2row的weight
T.edges[51, 31, 0]['arcTime'] = 4
for i in range(32, 39):
T.edges[i, i - 1, 0]['arcTime'] = 4
T.edges[i - 1, i, 0]['arcTime'] = 4
T.edges[i, i + 1, 0]['arcTime'] = 4
T.edges[i + 1, i, 0]['arcTime'] = 4
T.edges[i, i + 20, 0]['arcTime'] = 4
T.edges[i + 20, i, 0]['arcTime'] = 4
T.edges[39, 38, 0]['arcTime'] = 4
T.edges[38, 39, 0]['arcTime'] = 4
T.edges[39, 59, 0]['arcTime'] = 4
T.edges[59, 39, 0]['arcTime'] = 4
for j in range(51, 191, 20): # 设置第3row到第9row的weight
T.edges[j, j + 1, 0]['arcTime'] = 4
T.edges[j + 1, j, 0]['arcTime'] = 4
T.edges[j, j - 20, 0]['arcTime'] = 4
T.edges[j - 20, j, 0]['arcTime'] = 4
T.edges[j, j + 20, 0]['arcTime'] = 4
T.edges[j + 20, j, 0]['arcTime'] = 4
for i in range(j + 1, j + 8):
T.edges[i, i - 1, 0]['arcTime'] = 4
T.edges[i - 1, i, 0]['arcTime'] = 4
T.edges[i, i + 1, 0]['arcTime'] = 4
T.edges[i + 1, i, 0]['arcTime'] = 4
T.edges[i, i - 20, 0]['arcTime'] = 4
T.edges[i - 20, i, 0]['arcTIme'] = 4
T.edges[i, i + 20, 0]['arcTime'] = 4
T.edges[i + 20, i, 0]['arcTime'] = 4
T.edges[j + 8, j + 8 - 1, 0]['arcTime'] = 4
T.edges[j + 8 - 1, j + 8, 0]['arcTime'] = 4
T.edges[j + 8, j + 8 - 20, 0]['arcTime'] = 4
T.edges[j + 8 - 20, j + 8, 0]['arcTime'] = 4
T.edges[j + 8, j + 8 + 20, 0]['arcTime'] = 4
T.edges[j + 8 + 20, j + 8, 0]['arcTime'] = 4
T.edges[191, 192, 0]['arcTime'] = 4 # 设置第10row的weight
T.edges[192, 191, 0]['arcTime'] = 4
T.edges[191, 171, 0]['arcTime'] = 4
T.edges[171, 191, 0]['arcTime'] = 4
for i in range(192, 199):
T.edges[i, i - 1, 0]['arcTime'] = 4
T.edges[i - 1, i, 0]['arcTime'] = 4
T.edges[i, i + 1, 0]['arcTime'] = 4
T.edges[i + 1, i, 0]['arcTime'] = 4
T.edges[i, i - 20, 0]['arcTime'] = 4
T.edges[i - 20, i, 0]['arcTime'] = 4
T.edges[199, 198, 0]['arcTime'] = 4
T.edges[198, 199, 0]['arcTime'] = 4
T.edges[199, 179, 0]['arcTime'] = 4
T.edges[179, 199, 0]['arcTime'] = 4
T.edges[202, 203, 0]['arcTime'] = 2 # 设置lower-right的行程时间
T.edges[203, 202, 0]['arcTime'] = 2
T.edges[202, 222, 0]['arcTime'] = 2 # 设置第11row的weight
T.edges[222, 202, 0]['arcTime'] = 2
for i in range(203, 210):
T.edges[i, i - 1, 0]['arcTime'] = 2
T.edges[i - 1, i, 0]['arcTime'] = 2
T.edges[i, i + 1, 0]['arcTime'] = 2
T.edges[i + 1, i, 0]['arcTime'] = 2
T.edges[i, i + 20, 0]['arcTime'] = 2
T.edges[i + 20, i, 0]['arcTime'] = 2
T.edges[210, 209, 0]['arcTime'] = 2
T.edges[209, 210, 0]['arcTime'] = 2
T.edges[210, 230, 0]['arcTime'] = 2
T.edges[230, 210, 0]['arcTime'] = 2
for j in range(222, 362, 20): # 设置第12row到第18row的weight
T.edges[j, j + 1, 0]['arcTime'] = 2
T.edges[j + 1, j, 0]['arcTime'] = 2
T.edges[j, j - 20, 0]['arcTime'] = 2
T.edges[j - 20, j, 0]['arcTime'] = 2
T.edges[j, j + 20, 0]['arcTime'] = 2
T.edges[j + 20, j, 0]['arcTime'] = 2
for i in range(j + 1, j + 8):
T.edges[i, i - 1, 0]['arcTime'] = 2
T.edges[i - 1, i, 0]['arcTime'] = 2
T.edges[i, i + 1, 0]['arcTime'] = 2
T.edges[i + 1, i, 0]['arcTime'] = 2
T.edges[i, i - 20, 0]['arcTime'] = 2
T.edges[i - 20, i, 0]['arcTime'] = 2
T.edges[i, i + 20, 0]['arcTime'] = 2
T.edges[i + 20, i, 0]['arcTIme'] = 2
T.edges[j + 8, j + 8 - 1, 0]['arcTime'] = 2
T.edges[j + 8 - 1, j + 8, 0]['arcTIme'] = 2
T.edges[j + 8, j + 8 - 1, 0]['arcTime'] = 2
T.edges[j + 8 - 1, j + 8, 0]['arcTime'] = 2
T.edges[j + 8, j + 8 - 20, 0]['arcTime'] = 2
T.edges[j + 8 - 20, j + 8, 0]['arcTime'] = 2
T.edges[362, 363, 0]['arcTime'] = 2 # 设置第19row的weight
T.edges[363, 362, 0]['arcTime'] = 2
T.edges[362, 342, 0]['arcTime'] = 2
T.edges[342, 362, 0]['arcTime'] = 2
for i in range(363, 370):
T.edges[i, i - 1, 0]['arcTime'] = 2
T.edges[i - 1, i, 0]['arcTime'] = 2
T.edges[i, i + 1, 0]['arcTime'] = 2
T.edges[i + 1, i, 0]['arcTime'] = 2
T.edges[i, i - 20, 0]['arcTime'] = 2
T.edges[i - 20, i, 0]['arcTime'] = 2
T.edges[370, 369, 0]['arcTime'] = 2
T.edges[369, 370, 0]['arcTime'] = 2
T.edges[370, 350, 0]['arcTime'] = 2
T.edges[350, 370, 0]['arcTime'] = 2
return T
else:
# manhattan的图对象小弧数据未知
pass
def generate_distribution(self):
"""
对origin和destination进行均匀分布采样
:para num: 产生的观察样本的数量
:return: 返回origin和destination的均匀列表
"""
if self.type == 0:
# <随机分布模块>
origin_observations = [] # 产生均匀分布的origin
for i in range(self.num):
origin_observations.append(round(random.uniform(1, 25)))
destination_observations = [] # 产生均匀分布的destination
for i in range(self.num):
destination_observations.append(round(random.uniform(1, 25)))
origin_destination_observations = [] # 产生均匀分布的origin和destination
for i in range(self.num):
if origin_observations[i] != destination_observations[i]:
origin_destination_observations.append([origin_observations[i], destination_observations[i]])
return origin_destination_observations
elif self.type == 1:
# <随机分布模块>
origin_observations = [] # 产生均匀分布的origin
for i in range(self.num):
origin_observations.append(round(random.uniform(1, 400)))
destination_observations = [] # 产生均匀分布的destination
for i in range(self.num):
destination_observations.append(round(random.uniform(1, 400)))
origin_destination_observations = [] # 产生均匀分布的origin和destination
for i in range(self.num):
if origin_observations[i] != destination_observations[i]:
origin_destination_observations.append([origin_observations[i], destination_observations[i]])
return origin_destination_observations
else:
# 真实数据不需要生成仿真数据
pass
def lognormal_distribution(self, origin, destination):
T = self.True_Graph()
travelTime, path = self.modified_dijkstras(T, origin, destination)
mu = math.log(travelTime)
return random.lognormvariate(mu, self.sigma)
def get_observations(self): # get_observations是一个生成器
"""Return a generator that yields observation objects"""
origin_destination_observations = self.generate_distribution()
for i in range(len(origin_destination_observations)):
traveltime = self.lognormal_distribution(origin_destination_observations[i][0],
origin_destination_observations[i][1])
yield World.Observation(traveltime, origin_destination_observations[i][0],
origin_destination_observations[i][1])
def project(self, G, lng, lat):
"""
将某个点的坐标按照欧式距离映射到网络中最近的拓扑点上
:Param G: 拓扑图
:Param lng: 经度
:Param lat: 纬度
:Return: 返回最近的点的OSMid
"""
nearest_node = None
shortest_distance = inf
for n, d in G.nodes(data=True):
# d['x']是经度,d['y']是纬度
new_shortest_distance = ox.distance.euclidean_dist_vec(lng, lat, d['x'], d['y'])
if new_shortest_distance < shortest_distance:
nearest_node = n
shortest_distance = new_shortest_distance
return nearest_node, shortest_distance
    def get_df_observations(self):
        """
        Write the observed samples to a csv under ../train_dataset and read
        the file back as a DataFrame.
        :return: DataFrame of observed samples
        """
        if self.type == 0:
            os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)  # make sure the dataset folder exists
            data_file = os.path.join('..', 'train_dataset', 'small_synthetic_observed_data.csv')
            with open(data_file, 'w') as f:
                f.write('traveltime,origin,destination\n')
                for item in self.get_observations():
                    if item[1] != item[2]:
                        f.write('{0},{1},{2}\n'.format(item[0], item[1], item[2]))
            df_observed_data = pd.read_csv("../train_dataset/small_synthetic_observed_data.csv")
            return df_observed_data
        elif self.type == 1:
            os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)  # make sure the dataset folder exists
            data_file = os.path.join('..', 'train_dataset', 'normal_synthetic_observed_data.csv')
            with open(data_file, 'w') as f:
                f.write('traveltime,origin,destination\n')
                for item in self.get_observations():
                    if item[1] != item[2]:
                        f.write('{0},{1},{2}\n'.format(item[0], item[1], item[2]))
            df_observed_data = pd.read_csv("../train_dataset/normal_synthetic_observed_data.csv")
            return df_observed_data
        else:
            # Fetch the Manhattan drive network from OSM
            G = ox.graph_from_place('Manhattan, New York City, New York, USA', network_type='drive')
            # Convert the networkx object to GeoDataFrames
            gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
            # Project each trip's endpoints onto the nearest network nodes
            df_dataset = pd.read_csv("../train_dataset/dataset.csv")
            df_dataset['dist'] = df_dataset.apply(
                lambda row: self.project(G, row['pickup_longitude'], row['pickup_latitude'])[1] +
                            self.project(G, row['dropoff_longitude'], row['dropoff_latitude'])[1], axis=1)
            # Keep trips whose endpoints lie close enough to the network
            df_dataset = df_dataset[df_dataset['dist'] <= 0.002]
            df_dataset.to_csv("../train_dataset/processed_dataset.csv")
            # Re-read the filtered dataset
            df_dataset = pd.read_csv("../train_dataset/processed_dataset.csv")
            # note the use of axis=1 (row-wise apply)
            df_dataset['pickup_osmid'] = df_dataset.apply(
                lambda row: self.project(G, row['pickup_longitude'], row['pickup_latitude'])[0], axis=1)
            df_dataset['dropoff_osmid'] = df_dataset.apply(
                lambda row: self.project(G, row['dropoff_longitude'], row['dropoff_latitude'])[0], axis=1)
            # d['x'] is longitude, d['y'] is latitude
            df_dataset['projected_pickup_longitude'] = df_dataset.apply(lambda row: G.nodes[row['pickup_osmid']]['x'],
                                                                        axis=1)
            df_dataset['projected_pickup_latitude'] = df_dataset.apply(lambda row: G.nodes[row['pickup_osmid']]['y'],
                                                                       axis=1)
            df_dataset['geometry'] = df_dataset.apply(
                lambda row: Point(float(row['projected_pickup_longitude']), float(row['projected_pickup_latitude'])),
                axis=1)
            # Convert the DataFrame into a GeoDataFrame
            df_dataset_geo = gpd.GeoDataFrame(df_dataset, crs=gdf_edges.crs, geometry=df_dataset.geometry)
            os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)  # make sure the dataset folder exists
            data_file = os.path.join('..', 'train_dataset', 'real_observed_data.csv')
            with open(data_file, 'w') as f:
                f.write('traveltime,origin_osmid,destination_osmid\n')
                for i in range(len(df_dataset_geo)):
                    # column 11 presumably holds trip duration in seconds and
                    # 13/14 the pickup/dropoff osmids; keep trips of 1-60 min
                    # — TODO confirm column layout against the csv schema
                    if df_dataset_geo.iloc[i, 11] != df_dataset_geo.iloc[i, 12] and df_dataset_geo.iloc[
                        i, 11] / 60 >= 1 and df_dataset_geo.iloc[i, 11] / 60 <= 60:
                        f.write('{0},{1},{2}\n'.format(df_dataset_geo.iloc[i, 11] / 60, df_dataset_geo.iloc[i, 13],
                                                       df_dataset_geo.iloc[i, 14]))
            df_observed_data = pd.read_csv("../train_dataset/real_observed_data.csv")
            return df_observed_data
    def get_train_dataset(self):
        """
        Write the training samples to a csv under ../train_dataset and read
        the file back as a DataFrame.
        :return: DataFrame of training samples
        """
        if self.type == 0:
            os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)  # make sure the dataset folder exists
            data_file = os.path.join('..', 'train_dataset', 'small_train_data.csv')
            with open(data_file, 'w') as f:
                f.write('traveltime,origin,destination\n')
                for item in self.get_observations():
                    if item[1] != item[2]:
                        f.write('{0},{1},{2}\n'.format(item[0], item[1], item[2]))
            df_train_data = pd.read_csv("../train_dataset/small_train_data.csv")
            return df_train_data
        elif self.type == 1:
            os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)  # make sure the dataset folder exists
            data_file = os.path.join('..', 'train_dataset', 'normal_train_data.csv')
            with open(data_file, 'w') as f:
                f.write('traveltime,origin,destination\n')
                for item in self.get_observations():
                    if item[1] != item[2]:
                        f.write('{0},{1},{2}\n'.format(item[0], item[1], item[2]))
            df_train_data = pd.read_csv("../train_dataset/normal_train_data.csv")
            return df_train_data
        else:
            # Fetch the Manhattan drive network from OSM
            G = ox.graph_from_place('Manhattan, New York City, New York, USA', network_type='drive')
            # Convert the networkx object to GeoDataFrames
            gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
            # Project each trip's endpoints onto the nearest network nodes
            df_dataset = pd.read_csv("../train_dataset/train_dataset.csv")
            df_dataset['dist'] = df_dataset.apply(
                lambda row: self.project(G, row['pickup_longitude'], row['pickup_latitude'])[1] +
                            self.project(G, row['dropoff_longitude'], row['dropoff_latitude'])[1], axis=1)
            # Keep trips whose endpoints lie close enough to the network
            df_dataset = df_dataset[df_dataset['dist'] <= 0.002]
            df_dataset.to_csv("../train_dataset/processed_dataset.csv")
            # Re-read the filtered dataset
            df_dataset = pd.read_csv("../train_dataset/processed_dataset.csv")
            # note the use of axis=1 (row-wise apply)
            df_dataset['pickup_osmid'] = df_dataset.apply(
                lambda row: self.project(G, row['pickup_longitude'], row['pickup_latitude'])[0], axis=1)
            df_dataset['dropoff_osmid'] = df_dataset.apply(
                lambda row: self.project(G, row['dropoff_longitude'], row['dropoff_latitude'])[0], axis=1)
            # d['x'] is longitude, d['y'] is latitude
            df_dataset['projected_pickup_longitude'] = df_dataset.apply(lambda row: G.nodes[row['pickup_osmid']]['x'],
                                                                        axis=1)
            df_dataset['projected_pickup_latitude'] = df_dataset.apply(lambda row: G.nodes[row['pickup_osmid']]['y'],
                                                                       axis=1)
            df_dataset['geometry'] = df_dataset.apply(
                lambda row: Point(float(row['projected_pickup_longitude']), float(row['projected_pickup_latitude'])),
                axis=1)
            # Convert the DataFrame into a GeoDataFrame
            df_dataset_geo = gpd.GeoDataFrame(df_dataset, crs=gdf_edges.crs, geometry=df_dataset.geometry)
            os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)  # make sure the dataset folder exists
            data_file = os.path.join('..', 'train_dataset', 'real_train_data.csv')
            with open(data_file, 'w') as f:
                f.write('traveltime,origin_osmid,destination_osmid\n')
                for i in range(len(df_dataset_geo)):
                    # column 11 presumably holds trip duration in seconds and
                    # 13/14 the pickup/dropoff osmids; keep trips of 1-60 min
                    # — TODO confirm column layout against the csv schema
                    if df_dataset_geo.iloc[i, 11] != df_dataset_geo.iloc[i, 12] and df_dataset_geo.iloc[
                        i, 11] / 60 >= 1 and df_dataset_geo.iloc[i, 11] / 60 <= 60:
                        f.write('{0},{1},{2}\n'.format(df_dataset_geo.iloc[i, 11] / 60, df_dataset_geo.iloc[i, 13],
                                                       df_dataset_geo.iloc[i, 14]))
            df_train_data = pd.read_csv("../train_dataset/real_train_data.csv")
            return df_train_data
def modified_dijkstras(self, G, origin, destination):
"""
最短路算法
:return: 返回一个traveltime和path
"""
count = 0
paths_and_distances = {}
for node in G.nodes():
paths_and_distances[node] = [inf, [origin]]
paths_and_distances[origin][0] = 0
vertices_to_explore = [(0, origin)]
while vertices_to_explore:
current_distance, current_vertex = heappop(vertices_to_explore)
for neighbor in G.neighbors(current_vertex):
edge_weight = G.get_edge_data(current_vertex, neighbor, 0)['arcTime']
new_distance = current_distance + edge_weight
new_path = paths_and_distances[current_vertex][1] + [neighbor]
if new_distance < paths_and_distances[neighbor][0]:
paths_and_distances[neighbor][0] = new_distance
paths_and_distances[neighbor][1] = new_path
heappush(vertices_to_explore, (new_distance, neighbor))
count += 1
return paths_and_distances[destination]
def Graph(self):
"""
加载初始化人工网络
:return: 返回一个加载好的的图G对象
"""
if self.type == 0:
# <载入文件模块>
df_nodelist = pd.read_csv('../train_dataset/smallnodelist.csv')
df_edgelist = pd.read_csv('../train_dataset/smalledgelist.csv')
G = nx.MultiDiGraph() # 初始化图并载入点和边模块
G.add_nodes_from(df_nodelist['node']) # 添加点auto
G.add_edges_from(zip(df_edgelist['node1'], df_edgelist['node2'])) # 添加边auto
# <设置人工网络weight模块>
# 搜索nodes和edges一个是一个key,另一个是两个key
# 设置点对象的x和y坐标,方便自动生成geometry
for u, d in G.nodes(data=True):
u_lng = df_nodelist[df_nodelist.node == u].values.squeeze()[1]
u_lat = df_nodelist[df_nodelist.node == u].values.squeeze()[2]
d['y'] = u_lat
d['x'] = u_lng
# d['y'] = 0
# d['x'] = 0
# 双向车道,因此这是一个多重图
for u, v, d in G.edges(data=True): # 设置outside的行程时间
G.edges[u, v, 0]['arcTime'] = 1
for u, v, d in G.edges(data=True):
G.edges[u, v, 0]['distance'] = 1
# 设置图对象的crs
G.graph['crs'] = "epsg:4326"
return G
elif self.type == 1:
# <载入文件模块>
df_nodelist = pd.read_csv('../train_dataset/normalnodelist.csv')
df_edgelist = pd.read_csv('../train_dataset/normaledgelist.csv')
G = nx.MultiDiGraph() # 初始化图并载入点和边模块
G.add_nodes_from(df_nodelist['node']) # 添加点auto
G.add_edges_from(zip(df_edgelist['node1'], df_edgelist['node2'])) # 添加边auto
# <设置人工网络weight模块>
# 搜索nodes和edges一个是一个key,另一个是两个key
# 设置点对象的x和y坐标,方便自动生成geometry
for u, d in G.nodes(data=True):
u_lng = df_nodelist[df_nodelist.node == u].values.squeeze()[1]
u_lat = df_nodelist[df_nodelist.node == u].values.squeeze()[2]
d['y'] = u_lat
d['x'] = u_lng
# d['y'] = 0
# d['x'] = 0
# 双向车道,因此这是一个多重图
for u, v, d in G.edges(data=True): # 设置outside的行程时间
G.edges[u, v, 0]['arcTime'] = 1
for u, v, d in G.edges(data=True):
G.edges[u, v, 0]['distance'] = 1
# 设置图对象的crs
G.graph['crs'] = "epsg:4326"
return G
else:
# <载入文件模块>
# 获取manhattan的networkx对象
G = ox.graph_from_place('Manhattan, New York City, New York, USA', network_type='drive')
# <设置人工网络weight模块>
# 多重无向图与无向图添加权重的方式不同,d就是属性字典,无向图中G.edges[u,v]是字典而多重无向图G.edges[u,v]不是
for u, v, d in G.edges(data=True): # 设置outside的行程时间
G.edges[u, v, 0]['arcTime'] = 1
for u, v, d in G.edges(data=True):
G.edges[u, v, 0]['distance'] = 1
return G
    def optimization_method(self, G, K):
        """
        One SOCP optimization pass (Gurobi).

        Builds variables t_ij (arc times), T_hat (trip-time estimates) and
        x_od (estimation errors), adds constraints 11b-11f, minimizes the
        loss plus a smoothness penalty, then writes the optimized arc times
        back onto G.

        :param G: graph from initialization or the previous iteration
        :param K: accumulated path set, keyed 'node1_<o>_node2_<d>'
        :return: (G, K, P) - graph with updated arc travel times, the
                 extended path set, and this iteration's shortest paths
        """
        if self.type == 0:
            # <load data>
            df_observed_data = pd.read_csv('../train_dataset/small_synthetic_observed_data.csv')
            W = df_observed_data  # observation set W: (origin, destination) pairs with travel-time data
            E = G.edges  # arc set E: all small arcs
            # <helper>
            def geometric_mean(data):  # geometric mean T_od of the observations
                total = 1
                for i in data:
                    total *= i  # same as total = total * i
                return pow(total, 1 / len(data))
            # <define model>
            m = Model("SOCP model")
            # <define parameters>
            time_limit = self.time_limit
            reg = self.reg  # must be tuned to the problem size
            # <define decision variables>
            # NOTE(review): locals() is used as a plain dict of model variables;
            # entries are only ever read back through names[...], never as locals.
            names = locals()
            # variable 1: t_ij
            for node1, node2, temp in E:  # arc travel-time estimate t_ij per arc
                names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='arc_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # variable 2: T_hat
            for i in range(W.shape[0]):  # trip travel-time estimate T^hat per observed pair
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                names['trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='trip_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # variable 3: x_od
            for i in range(W.shape[0]):  # estimation error x_od per observed pair
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='error_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            for node1, node2, temp in E:  # auxiliary variables linearizing the absolute value
                # Both arc directions get an abs variable in this branch.
                names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='abs_' + 'node1_' + str(node1) + '_node2_' + str(node2))
                names['abs_' + 'node1_' + str(node2) + '_node2_' + str(node1)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='abs_' + 'node1_' + str(node2) + '_node2_' + str(node1))
            # <data structures>
            # structure 1: P - shortest paths under the previous iteration's arc times
            P = defaultdict(list)
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                P['node1_' + str(origin) + '_node2_' + str(destination)] = \
                    self.modified_dijkstras(G, origin, destination)[1]
            # structure 2: K - path set of the observed pairs, grown each iteration
            for key, val in P.items():
                string = key.split('_')
                origin = int(string[1])
                destination = int(string[3])
                K['node1_' + str(origin) + '_node2_' + str(destination)].append(val)
            # structure 3: all observed samples per O-D pair
            O = defaultdict(list)  # travel-time observations keyed by origin/destination
            for i in range(df_observed_data.shape[0]):
                origin = int(df_observed_data.iloc[i][1])
                destination = int(df_observed_data.iloc[i][2])
                O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)].append(
                    df_observed_data.iloc[i][0])
            # structure 4: geometric mean of the observed times per O-D pair
            M = defaultdict(int)
            for i in range(df_observed_data.shape[0]):
                origin = int(df_observed_data.iloc[i][1])
                destination = int(df_observed_data.iloc[i][2])
                M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)] = geometric_mean(
                    O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)])
            # <constraints>
            # constraint 11b: trip estimate equals the sum of its shortest path's arcs
            for i in range(df_observed_data.shape[0]):
                origin = int(df_observed_data.iloc[i][1])
                destination = int(df_observed_data.iloc[i][2])
                traveltime, path = self.modified_dijkstras(G, origin, destination)
                arcSum = 0
                # NOTE(review): the inner loop reuses index i; harmless because the
                # outer range iterator is unaffected, but it shadows the row index.
                for i in range(len(path) - 1):
                    node1 = int(path[i])
                    node2 = int(path[i + 1])
                    arcSum += names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                m.addConstr(names['trip_' + 'node1_' + str(origin) + '_node2_' + str(
                    destination)] == arcSum)  # shortest-path time equals the trip-time estimate
            # constraint 11c: no previously seen path may beat the trip estimate
            if K:
                for key, val in K.items():
                    string = key.split('_')
                    origin = int(string[1])
                    destination = int(string[3])
                    for path in val:
                        othertime = 0
                        for i in range(len(path) - 1):
                            node1 = path[i]
                            node2 = path[i + 1]
                            othertime += names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                        m.addConstr(
                            othertime >= names['trip_' + 'node1_' + str(origin) + '_node2_' + str(destination)])  # inequality direction: alternatives are no shorter
            # constraint 11d: linear part of the error bound
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                m.addConstr(names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= names[
                    'trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] / M[
                    'observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
            # constraint 11e: second-order-cone error bound
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                qexpr1 = names['trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] - names[
                    'error_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                qexpr2 = 2 * np.sqrt(M['observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
                qexpr3 = names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] + names[
                    'trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                m.addQConstr(qexpr1 * qexpr1 + qexpr2 * qexpr2 <= qexpr3 * qexpr3)
            # constraint 11f: lower bound on each arc time (speed limit)
            for node1, node2, temp in E:
                m.addConstr(names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= time_limit)
            # <objective>
            obj = 0
            # loss term: error weighted by the number of observations n_od
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                n_od = len(O['observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
                obj += names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] * n_od
            # penalty term: smoothness between adjacent arcs
            for node1, node2, temp in E:
                for node3, node4, temp in E:
                    # intersect the endpoint lists to detect contiguous arcs
                    arc1 = [node1, node2]
                    arc2 = [node3, node4]
                    intersection = list(set(arc1) & set(arc2))
                    if intersection:
                        arc1 = names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                        arc2 = names['arc_' + 'node1_' + str(node3) + '_node2_' + str(node4)]
                        dis1 = G.edges[node1, node2, 0]['distance']
                        dis2 = G.edges[node3, node4, 0]['distance']
                        m.addConstr(
                            names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= arc1 / dis1 - arc2 / dis2)
                        m.addConstr(names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= -(
                            arc1 / dis1 - arc2 / dis2))
                        obj += reg * names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] * 2 / (dis1 + dis2)
            # set the objective (Gurobi minimizes by default)
            m.setObjective(obj)
            # <solve>
            m.optimize()
            # print('最优值:',m.objVal)
            # for v in m.getVars():
            #     print("参数", v.varName,'=',v.x)
            # <write results back>
            for v in m.getVars():
                string = v.varName.split('_')
                node1 = int(string[2])
                node2 = int(string[4])
                if 'arc' in v.varName:  # push each arc_node1_<n>_node2_<n> value into the graph
                    G.edges[node1, node2, 0]['arcTime'] = v.x
            return G, K, P
        elif self.type == 1:
            # <load data>
            df_observed_data = pd.read_csv('../train_dataset/normal_synthetic_observed_data.csv')
            W = df_observed_data  # observation set W: (origin, destination) pairs with travel-time data
            E = G.edges  # arc set E: all small arcs
            # <helper>
            def geometric_mean(data):  # geometric mean T_od of the observations
                total = 1
                for i in data:
                    total *= i  # same as total = total * i
                return pow(total, 1 / len(data))
            # <define model>
            m = Model("SOCP model")
            # <define parameters>
            time_limit = self.time_limit
            reg = self.reg  # must be tuned to the problem size
            # <define decision variables>
            names = locals()  # used as a plain dict of model variables
            # variable 1: t_ij
            for node1, node2, temp in E:  # arc travel-time estimate t_ij per arc
                names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='arc_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # variable 2: T_hat
            for i in range(W.shape[0]):  # trip travel-time estimate T^hat per observed pair
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                names['trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='trip_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # variable 3: x_od
            for i in range(W.shape[0]):  # estimation error x_od per observed pair
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='error_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            for node1, node2, temp in E:  # auxiliary variable linearizing the absolute value
                names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='abs_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # <data structures>
            # structure 1: P - shortest paths under the previous iteration's arc times
            P = defaultdict(list)
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                P['node1_' + str(origin) + '_node2_' + str(destination)] = \
                    self.modified_dijkstras(G, origin, destination)[1]
            # structure 2: K - path set of the observed pairs, grown each iteration
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                K['node1_' + str(origin) + '_node2_' + str(destination)].append(
                    self.modified_dijkstras(G, origin, destination)[1])
            # structure 3: all observed samples per O-D pair
            O = defaultdict(list)  # travel-time observations keyed by origin/destination
            for i in range(df_observed_data.shape[0]):
                origin = int(df_observed_data.iloc[i][1])
                destination = int(df_observed_data.iloc[i][2])
                O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)].append(
                    df_observed_data.iloc[i][0])
            # structure 4: geometric mean of the observed times per O-D pair
            M = defaultdict(int)
            for i in range(df_observed_data.shape[0]):
                origin = int(df_observed_data.iloc[i][1])
                destination = int(df_observed_data.iloc[i][2])
                M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)] = geometric_mean(
                    O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)])
            # <constraints>
            # constraint 11b: trip estimate equals the sum of its shortest path's arcs
            for i in range(df_observed_data.shape[0]):
                origin = int(df_observed_data.iloc[i][1])
                destination = int(df_observed_data.iloc[i][2])
                traveltime, path = self.modified_dijkstras(G, origin, destination)
                arcSum = 0
                for i in range(len(path) - 1):  # NOTE(review): shadows the outer row index i
                    node1 = int(path[i])
                    node2 = int(path[i + 1])
                    arcSum += names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                m.addConstr(names['trip_' + 'node1_' + str(origin) + '_node2_' + str(
                    destination)] == arcSum)  # shortest-path time equals the trip-time estimate
            # constraint 11c: no previously seen path may beat the trip estimate
            if K:
                for key, val in K.items():
                    string = key.split('_')
                    origin = int(string[1])
                    destination = int(string[3])
                    for path in val:
                        othertime = 0
                        for i in range(len(path) - 1):
                            node1 = path[i]
                            node2 = path[i + 1]
                            othertime += names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                        m.addConstr(
                            othertime >= names['trip_' + 'node1_' + str(origin) + '_node2_' + str(destination)])  # inequality direction: alternatives are no shorter
            # constraint 11d: linear part of the error bound
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                m.addConstr(names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= names[
                    'trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] / M[
                    'observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
            # constraint 11e: second-order-cone error bound
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                qexpr1 = names['trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] - names[
                    'error_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                qexpr2 = 2 * np.sqrt(M['observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
                qexpr3 = names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] + names[
                    'trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                m.addQConstr(qexpr1 * qexpr1 + qexpr2 * qexpr2 <= qexpr3 * qexpr3)
            # constraint 11f: lower bound on each arc time (speed limit)
            for node1, node2, temp in E:
                m.addConstr(names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= time_limit)
            # <objective>
            obj = 0
            # loss term: error weighted by the number of observations n_od
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                n_od = len(O['observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
                obj += names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] * n_od
            # penalty term: smoothness between adjacent arcs
            for node1, node2, temp in E:
                for node3, node4, temp in E:
                    # intersect the endpoint lists to detect contiguous arcs
                    arc1 = [node1, node2]
                    arc2 = [node3, node4]
                    intersection = list(set(arc1) & set(arc2))
                    if intersection:
                        arc1 = names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                        arc2 = names['arc_' + 'node1_' + str(node3) + '_node2_' + str(node4)]
                        dis1 = G.edges[node1, node2, 0]['distance']
                        dis2 = G.edges[node3, node4, 0]['distance']
                        obj += reg * names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] * 2 / (dis1 + dis2)
                        m.addConstr(
                            names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= arc1 / dis1 - arc2 / dis2)
                        m.addConstr(names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= -(
                            arc1 / dis1 - arc2 / dis2))
            # set the objective
            m.setObjective(obj, gurobipy.GRB.MINIMIZE)
            # <solve>
            m.optimize()
            # print('最优值:',m.objVal)
            # for v in m.getVars():
            #     print("参数", v.varName,'=',v.x)
            # <write results back>
            for v in m.getVars():
                string = v.varName.split('_')
                node1 = int(string[2])
                node2 = int(string[4])
                if 'arc' in v.varName:  # push each arc_node1_<n>_node2_<n> value into the graph
                    G.edges[node1, node2, 0]['arcTime'] = v.x
            return G, K, P
        else:
            # <load data>
            df_observed_data = pd.read_csv('../train_dataset/real_observed_data.csv')
            W = df_observed_data  # observation set W: (origin, destination) pairs with travel-time data
            E = G.edges  # arc set E: all small arcs
            # <helper>
            def geometric_mean(data):  # geometric mean T_od of the observations
                total = 1
                for i in data:
                    total *= i  # same as total = total * i
                return pow(total, 1 / len(data))
            # <define model>
            m = Model("SOCP model")
            # <define parameters>
            time_limit = self.time_limit
            reg = self.reg  # must be tuned to the problem size
            # <define decision variables>
            names = locals()  # used as a plain dict of model variables
            # variable 1: t_ij
            for node1, node2, temp in E:  # arc travel-time estimate t_ij per arc
                # Only key-0 edges get a variable on the real network, where
                # parallel edges (key > 0) can exist.
                if temp == 0:
                    names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                        vtype=GRB.CONTINUOUS,
                        name='arc_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # variable 2: T_hat
            for i in range(W.shape[0]):  # trip travel-time estimate T^hat per observed pair
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                names['trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='trip_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # variable 3: x_od
            for i in range(W.shape[0]):  # estimation error x_od per observed pair
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='error_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            for node1, node2, temp in E:  # auxiliary variable linearizing the absolute value
                names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] = m.addVar(
                    vtype=GRB.CONTINUOUS,
                    name='abs_' + 'node1_' + str(node1) + '_node2_' + str(node2))
            # <data structures>
            # structure 1: P - shortest paths under the previous iteration's arc times
            P = defaultdict(list)
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                P['node1_' + str(origin) + '_node2_' + str(destination)] = \
                    self.modified_dijkstras(G, origin, destination)[1]
            # structure 2: K - path set of the observed pairs, grown each iteration
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                K['node1_' + str(origin) + '_node2_' + str(destination)].append(
                    self.modified_dijkstras(G, origin, destination)[1])
            # structure 3: all observed samples per O-D pair
            O = defaultdict(list)  # travel-time observations keyed by origin/destination
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)].append(int(W.iloc[i][0]))
            # structure 4: geometric mean of the observed times per O-D pair
            M = defaultdict(int)
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)] = geometric_mean(
                    O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)])
            # <constraints>
            # constraint 11b: trip estimate equals the sum of its shortest path's arcs
            for i in range(W.shape[0]):
                origin = int(W.iloc[i][1])
                destination = int(W.iloc[i][2])
                traveltime, path = self.modified_dijkstras(G, origin, destination)
                arcSum = 0
                for i in range(len(path) - 1):  # NOTE(review): shadows the outer row index i
                    node1 = int(path[i])
                    node2 = int(path[i + 1])
                    arcSum += names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                m.addConstr(names['trip_' + 'node1_' + str(origin) + '_node2_' + str(
                    destination)] == arcSum)  # shortest-path time equals the trip-time estimate
            # constraint 11c: no previously seen path may beat the trip estimate
            if K:
                for key, val in K.items():
                    string = key.split('_')
                    origin = int(string[1])
                    destination = int(string[3])
                    for path in val:
                        othertime = 0
                        for i in range(len(path) - 1):
                            node1 = path[i]
                            node2 = path[i + 1]
                            othertime += names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                        m.addConstr(
                            othertime >= names['trip_' + 'node1_' + str(origin) + '_node2_' + str(destination)])  # inequality direction: alternatives are no shorter
            # constraint 11d: linear part of the error bound
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                m.addConstr(names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= names[
                    'trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] / M[
                    'observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
            # constraint 11e: second-order-cone error bound
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                qexpr1 = names['trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)] - names[
                    'error_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                qexpr2 = 2 * np.sqrt(M['observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
                qexpr3 = names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] + names[
                    'trip_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                m.addQConstr(qexpr1 * qexpr1 + qexpr2 * qexpr2 <= qexpr3 * qexpr3)
            # # constraint 11f (disabled): arc-time lower bound; infeasibility may
            # # be caused by the time_limit value on the real network
            # for node1,node2,temp in E:
            #     m.addConstr(names['arc_'+ 'node1_'+str(node1) +'_node2_'+ str(node2)] >= time_limit)
            # <objective>
            obj = 0
            # loss term: error weighted by the number of observations n_od
            for i in range(W.shape[0]):
                node1 = int(W.iloc[i][1])
                node2 = int(W.iloc[i][2])
                n_od = len(O['observe_' + 'node1_' + str(node1) + '_node2_' + str(node2)])
                obj += names['error_' + 'node1_' + str(node1) + '_node2_' + str(node2)] * n_od
            # penalty term: smoothness between adjacent arcs
            for node1, node2, temp in E:
                for node3, node4, temp in E:
                    # intersect the endpoint lists to detect contiguous arcs
                    arc1 = [node1, node2]
                    arc2 = [node3, node4]
                    intersection = list(set(arc1) & set(arc2))
                    if intersection:
                        arc1 = names['arc_' + 'node1_' + str(node1) + '_node2_' + str(node2)]
                        arc2 = names['arc_' + 'node1_' + str(node3) + '_node2_' + str(node4)]
                        dis1 = G.edges[node1, node2, 0]['distance']
                        dis2 = G.edges[node3, node4, 0]['distance']
                        obj += reg * names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] * 2 / (dis1 + dis2)
                        m.addConstr(
                            names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= arc1 / dis1 - arc2 / dis2)
                        m.addConstr(names['abs_' + 'node1_' + str(node1) + '_node2_' + str(node2)] >= -(
                            arc1 / dis1 - arc2 / dis2))
            # set the objective
            m.setObjective(obj, gurobipy.GRB.MINIMIZE)
            # <solve>
            m.optimize()
            # print('最优值:',m.objVal)
            # for v in m.getVars():
            #     print("参数", v.varName,'=',v.x)
            # <write results back>
            for v in m.getVars():
                string = v.varName.split('_')
                node1 = int(string[2])
                node2 = int(string[4])
                if 'arc' in v.varName:  # push each arc_node1_<n>_node2_<n> value into the graph
                    G.edges[node1, node2, 0]['arcTime'] = v.x
            return G, K, P
def diff(self, lastP, P):
count = 0
G = self.Graph()
arc_lastP = defaultdict(list)
for key, val in lastP.items(): # lastP {'node1_num_node2_num':[node1,node2]}
for i in range(len(val) - 1):
origin = val[i]
destination = val[i + 1]
arc_lastP[key].append(str(origin) + str(destination)) # {"node1_num_node2_num": [arc1,arc2]}
arc_P = defaultdict(list)
for key, val in P.items():
for i in range(len(val) - 1):
origin = val[i]
destination = val[i + 1]
arc_P[key].append(str(origin) + str(destination))
for key, val in arc_lastP.items(): # {'origin,destination':[arc1,arc2]}
for arc in val:
if arc not in arc_P[key]:
count += 1
for key, val in arc_P.items():
for arc in val:
if arc not in arc_lastP[key]:
count += 1
return count / len(lastP)
def RMLSB(self, G):
"""
定义一个评价函数,对比小弧之间的误差,仿真数据有真实弧数据,而真实数据中通过与其他算法对比获取gap
G: 训练好的图对象
test_dataset: 输入测试集,测试集的数据是没有经过训练过的
"""
RMLSB = 0
if self.type == 0:
train_dataset = "../train_dataset/small_synthetic_observed_data.csv"
elif self.type == 1:
train_dataset = "../train_dataset/normal_synthetic_observed_data.csv"
else:
train_dataset = "../train_dataset/real_observed_data"
# <help函数>
def geometric_mean(data): # 计算几何平均数T_od
total = 1
for i in data:
total *= i # 等同于total=total*i
return pow(total, 1 / len(data))
df_observed_data = pd.read_csv(train_dataset)
# 数据结构3:所有观察样本
O = defaultdict(list) # origin和destination的行程时间列表
for i in range(df_observed_data.shape[0]):
origin = int(df_observed_data.iloc[i][1])
destination = int(df_observed_data.iloc[i][2])
O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)].append(df_observed_data.iloc[i][0])
# 数据结构4:所有观察样本时间的几何平均
M = defaultdict(int) # origin和destination的行程时间几何平均值
for i in range(df_observed_data.shape[0]):
origin = int(df_observed_data.iloc[i][1])
destination = int(df_observed_data.iloc[i][2])
M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)] = geometric_mean(
O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)])
for origin in G.nodes():
for destination in G.nodes():
if origin != destination and int(
M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)]) != 0:
observe = M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)]
trip = self.modified_dijkstras(G, origin, destination)[0]
print(observe, trip)
RMLSB += math.pow((math.log(trip) - math.log(observe)), 2)
return np.sqrt(RMLSB)
def geo(self, G):
if self.type == 0:
# 载入文件模块
df_nodelist = pd.read_csv('../train_dataset/smallnodelist.csv')
edgelist = []
for u, v, d in G.edges(data=True):
u_lng = df_nodelist[df_nodelist.node == u].values.squeeze()[1]
u_lat = df_nodelist[df_nodelist.node == u].values.squeeze()[2]
v_lng = df_nodelist[df_nodelist.node == v].values.squeeze()[1]
v_lat = df_nodelist[df_nodelist.node == v].values.squeeze()[2]
G.edges[u, v, 0]['geometry'] = LineString([(u_lng, u_lat), (v_lng, v_lat)])
edge_data = dict()
edge_data['node1'] = u
edge_data['node2'] = v
edge_data.update(d)
edgelist.append(edge_data)
df_edgelist = pd.DataFrame(edgelist)
edgelist_crs = {'init': 'epsg:4326'}
df_edgelist_geo = gpd.GeoDataFrame(df_edgelist, crs=edgelist_crs, geometry=df_edgelist.geometry)
return df_edgelist_geo
elif self.type == 1:
# 载入文件模块
df_nodelist = pd.read_csv('../train_dataset/normalnodelist.csv')
edgelist = []
for u, v, d in G.edges(data=True):
u_lng = df_nodelist[df_nodelist.node == u].values.squeeze()[1]
u_lat = df_nodelist[df_nodelist.node == u].values.squeeze()[2]
v_lng = df_nodelist[df_nodelist.node == v].values.squeeze()[1]
v_lat = df_nodelist[df_nodelist.node == v].values.squeeze()[2]
G.edges[u, v, 0]['geometry'] = LineString([(u_lng, u_lat), (v_lng, v_lat)])
edge_data = dict()
edge_data['node1'] = u
edge_data['node2'] = v
edge_data.update(d)
edgelist.append(edge_data)
df_edgelist = pd.DataFrame(edgelist)
edgelist_crs = {'init': 'epsg:4326'}
df_edgelist_geo = gpd.GeoDataFrame(df_edgelist, crs=edgelist_crs, geometry=df_edgelist.geometry)
return df_edgelist_geo
else:
# 绘图模块
gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
return gdf_edges
    def train(self):
        """Alternate shortest-path computation and SOCP solves until the path
        sets of two consecutive iterations differ by less than 0.5 (as
        measured by self.diff), checkpointing the graph each iteration."""
        if self.type == 0:
            start_time = time.time()
            # start of procedure
            # a tracktable algorithm
            K = defaultdict(list)
            self.get_df_observations()
            difference = inf
            G = self.Graph()
            T = self.True_Graph()  # NOTE(review): T is never used afterwards
            count = 0
            while difference >= 0.5:
                self.geo(G).plot(column='arcTime', cmap='RdYlGn')
                G, K, P = self.optimization_method(G, K)
                # Alternate P between two slots so consecutive iterations can
                # be compared; diff needs the previous and the current paths.
                if count % 2 == 0:
                    lastP1 = P
                else:
                    lastP2 = P
                if count != 0:
                    difference = self.diff(lastP1, lastP2)
                count += 1
                # Checkpoint the current graph as GeoJSON for inspection.
                gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
                gdf_nodes.to_file("../smalldata/gdf_nodes" + str(count) + ".geojson", driver="GeoJSON")
                gdf_edges.to_file("../smalldata/gdf_edges" + str(count) + ".geojson", driver="GeoJSON")
                print(f'正在进行第{count}次迭代,误差为{difference}.')
                RMLSB = self.RMLSB(G)
                print(f'优化模型当前的RMLSB为{RMLSB}')
            # end of procedure
            elapsed_time = time.time() - start_time
            hour = elapsed_time // 3600
            minute = (elapsed_time - hour * 3600) // 60
            second = elapsed_time % 60
            print(f'inference time cost: {hour} hours, {minute} minutes,{second} seconds')
        elif self.type == 1:
            start_time = time.time()
            # start of procedure
            # a tracktable algorithm
            K = defaultdict(list)
            self.get_df_observations()
            difference = inf
            G = self.Graph()
            T = self.True_Graph()  # NOTE(review): T is never used afterwards
            count = 0
            while difference >= 0.5:
                self.geo(G).plot(column='arcTime', cmap='RdYlGn')
                G, K, P = self.optimization_method(G, K)
                # Alternate P between two slots so consecutive iterations can
                # be compared.
                if count % 2 == 0:
                    lastP1 = P
                else:
                    lastP2 = P
                if count != 0:
                    difference = self.diff(lastP1, lastP2)
                count += 1
                # Checkpoint the current graph as GeoJSON for inspection.
                gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
                gdf_nodes.to_file("../normaldata/gdf_nodes" + str(count) + ".geojson", driver="GeoJSON")
                gdf_edges.to_file("../normaldata/gdf_edges" + str(count) + ".geojson", driver="GeoJSON")
                print(f'正在进行第{count}次迭代,误差为{difference}.')
                RMLSB = self.RMLSB(G)
                print(f'优化模型当前的RMLSB为{RMLSB}')
            # end of procedure
            elapsed_time = time.time() - start_time
            hour = elapsed_time // 3600
            minute = (elapsed_time - hour * 3600) // 60
            second = elapsed_time % 60
            print(f'inference time cost: {hour} hours, {minute} minutes,{second} seconds')
        else:
            start_time = time.time()
            # start of procedure
            # a tracktable algorithm
            K = defaultdict(list)
            self.get_df_observations()
            difference = inf
            G = self.Graph()
            count = 0
            while difference >= 0.5:
                # k-th iteration: plot the current arc times
                fig, ax = plt.subplots(figsize=(30, 30))
                self.geo(G).plot(ax=ax, column='arcTime', cmap='Paired', categorical=True)
                ax.set_axis_off()
                plt.show()
                G, K, P = self.optimization_method(G, K)
                # Alternate P between two slots so consecutive iterations can
                # be compared.
                if count % 2 == 0:
                    lastP1 = P
                else:
                    lastP2 = P
                if count != 0:
                    difference = self.diff(lastP1, lastP2)
                count += 1
                gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
                # Clean mixed-type columns with apply: list-valued osmid rows
                # are zeroed out and then dropped before writing GeoJSON.
                gdf_edges['osmid'] = gdf_edges.apply(lambda row: 0 if type(row['osmid']) == list else row['osmid'],
                                                     axis=1)
                gdf_edges = gdf_edges[gdf_edges['osmid'] > 0]
                gdf_nodes.to_file("../realdata/gdf_nodes" + str(count) + ".geojson", driver="GeoJSON")
                gdf_edges.to_file("../realdata/gdf_edges" + str(count) + ".geojson", driver="GeoJSON")
                print(f'正在进行第{count}次迭代,误差为{difference}.')
            # end of procedure
            elapsed_time = time.time() - start_time
            hour = elapsed_time // 3600
            minute = (elapsed_time - hour * 3600) // 60
            second = elapsed_time % 60
            print(f'inference time cost: {hour} hours, {minute} minutes,{second} seconds')
def test(self, G):
"""
G: 输入训练好的图模型
test_dataset: 输入测试集,与训练集不同
"""
if self.type == 0:
test_dataset = "../test_dataset/small_train_data.csv"
elif self.type == 1:
test_dataset = "../test_dataset/normal_train_data.csv"
else:
test_dataset = "../test_dataset/real_train_data.csv"
RMLSB = 0
# <help函数>
def geometric_mean(data): # 计算几何平均数T_od
total = 1
for i in data:
total *= i # 等同于total=total*i
return pow(total, 1 / len(data))
df_observed_data = pd.read_csv(test_dataset)
# 数据结构3:所有观察样本
O = defaultdict(list) # origin和destination的行程时间列表
for i in range(df_observed_data.shape[0]):
origin = int(df_observed_data.iloc[i][1])
destination = int(df_observed_data.iloc[i][2])
O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)].append(df_observed_data.iloc[i][0])
# 数据结构4:所有观察样本时间的几何平均
M = defaultdict(int) # origin和destination的行程时间几何平均值
for i in range(df_observed_data.shape[0]):
origin = int(df_observed_data.iloc[i][1])
destination = int(df_observed_data.iloc[i][2])
M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)] = geometric_mean(
O['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)])
for origin in G.nodes():
for destination in G.nodes():
if origin != destination and int(
M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)]) != 0:
observe = M['observe_' + 'node1_' + str(origin) + '_node2_' + str(destination)]
trip = self.modified_dijkstras(G, origin, destination)[0]
RMLSB += math.pow((math.log(trip) - math.log(observe)), 2)
return np.sqrt(RMLSB)
class Visualization:
def __init__(self, G, type=0, manual=True):
self.G = G
self.type = type
self.manual = manual
def Graph(self):
"""
加载初始化人工网络
:return: 返回一个加载好的的图G对象
"""
# <设置人工网络weight模块>
# 多重无向图与无向图添加权重的方式不同,d就是属性字典,无向图中G.edges[u,v]是字典而多重无向图G.edges[u,v]不是
for u, v, d in self.G.edges(data=True): # 设置outside的行程时间
self.G.edges[u, v, 0]['arcTime'] = 1
for u, v, d in self.G.edges(data=True):
self.G.edges[u, v, 0]['distance'] = 1
return self.G
def project(self, G, lng, lat):
"""
将某个点的坐标按照欧式距离映射到网络中最近的拓扑点上
:Param G: 拓扑图
:Param lng: 经度
:Param lat: 纬度
:Return: 返回最近的点的OSMid
"""
nearest_node = None
shortest_distance = inf
for n, d in G.nodes(data=True):
# d['x']是经度,d['y']是纬度
new_shortest_distance = ox.distance.euclidean_dist_vec(lng, lat, d['x'], d['y'])
if new_shortest_distance < shortest_distance:
nearest_node = n
shortest_distance = new_shortest_distance
return nearest_node, shortest_distance
def modified_dijkstras(self, origin, destination):
"""
最短路算法
:return: 返回一个traveltime和path
"""
count = 0
paths_and_distances = {}
for node in self.G.nodes():
paths_and_distances[node] = [inf, [origin]]
paths_and_distances[origin][0] = 0
vertices_to_explore = [(0, origin)]
while vertices_to_explore:
current_distance, current_vertex = heappop(vertices_to_explore)
for neighbor in self.G.neighbors(current_vertex):
# get_edge_data得到的是嵌套字典
edge_weight = self.G.get_edge_data(current_vertex, neighbor)[0]['arcTime']
new_distance = current_distance + edge_weight
new_path = paths_and_distances[current_vertex][1] + [neighbor]
if new_distance < paths_and_distances[neighbor][0]:
paths_and_distances[neighbor][0] = new_distance
paths_and_distances[neighbor][1] = new_path
heappush(vertices_to_explore, (new_distance, neighbor))
count += 1
return paths_and_distances[destination]
    def plot_path_evolution(G):
        # NOTE(review): this method is missing the `self` parameter -- when
        # invoked as an instance method, the Visualization object is bound to
        # `G` instead of a graph.  As written it only shows the current
        # matplotlib figure; appears to be a stub.
        plt.show()
def plot_taxi_position(self, map=True, kind=0):
if map == False:
# 获取manhattan的networkx对象
G = ox.graph_from_place('Manhattan, New York City, New York, USA', network_type='drive')
gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
df_dataset = pd.read_csv("../train_dataset/dataset.csv")
df_dataset['geometry'] = df_dataset.apply(
lambda row: Point(float(row['pickup_longitude']), float(row['pickup_latitude'])), axis=1)
df_dataset_geo = gpd.GeoDataFrame(df_dataset, crs=gdf_edges.crs, geometry=df_dataset.geometry)
fig, ax = plt.subplots(figsize=(30, 30))
df_dataset_geo.plot(ax=ax, color='green', markersize=1)
gdf_edges.plot(ax=ax, cmap='Reds')
ax.set_axis_off()
plt.show()
else:
# 获取manhattan的networkx对象
G = ox.graph_from_place('Manhattan, New York City, New York, USA', network_type='drive')
# 将network对象转换成geodatafram对象
gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
df_dataset = pd.read_csv("../train_dataset/dataset.csv")
if kind == 0:
df_dataset['dist'] = df_dataset.apply(
lambda row: self.project(G, row['pickup_longitude'], row['pickup_latitude'])[1], axis=1)
df_dataset = df_dataset[df_dataset['dist'] <= 0.001]
df_dataset.to_csv("../train_dataset/processdataset.csv")
# 绘制没映射之前的多层图
df_dataset = pd.read_csv("../train_dataset/processdataset.csv")
df_dataset['geometry'] = df_dataset.apply(
lambda row: Point(float(row['pickup_longitude']), float(row['pickup_latitude'])), axis=1)
df_dataset_geo = gpd.GeoDataFrame(df_dataset, crs=gdf_edges.crs, geometry=df_dataset.geometry)
fig, ax = plt.subplots(figsize=(30, 30))
df_dataset_geo.plot(ax=ax, color='green', markersize=1)
gdf_edges.plot(ax=ax, cmap='Reds')
ax.set_axis_off()
plt.show()
elif kind == 1:
df_dataset['dist'] = df_dataset.apply(
lambda row: self.project(G, row['dropoff_longitude'], row['dropoff_latitude'])[1], axis=1)
df_dataset = df_dataset[df_dataset['dist'] <= 0.001]
df_dataset.to_csv("../train_dataset/processdataset.csv")
# 绘制没映射之前的多层图
df_dataset = pd.read_csv("../train_dataset/processdataset.csv")
df_dataset['geometry'] = df_dataset.apply(
lambda row: Point(float(row['dropoff_longitude']), float(row['dropoff_latitude'])), axis=1)
df_dataset_geo = gpd.GeoDataFrame(df_dataset, crs=gdf_edges.crs, geometry=df_dataset.geometry)
fig, ax = plt.subplots(figsize=(30, 30))
df_dataset_geo.plot(ax=ax, color='green', markersize=1)
gdf_edges.plot(ax=ax, cmap='Reds')
ax.set_axis_off()
plt.show()
else:
df_dataset['dist1'] = df_dataset.apply(
lambda row: self.project(G, row['pickup_longitude'], row['pickup_latitude'])[1], axis=1)
df_dataset['dist2'] = df_dataset.apply(
lambda row: self.project(G, row['dropoff_longitude'], row['dropoff_latitude'])[1], axis=1)
df_dataset = df_dataset[df_dataset['dist1'] <= 0.001 and df_dataset['dist2'] <= 0.001]
df_dataset.to_csv("../train_dataset/processdataset.csv")
# 绘制没映射之前的多层图
df_dataset = pd.read_csv("../train_dataset/processdataset.csv")
df_dataset['geometry'] = df_dataset.apply(lambda row: LineString(
[(float(row['pickup_longitude']), float(row['pickup_latitude'])),
(float(row['dropoff_longitude']), float(row['dropoff_latitude']))]), axis=1)
df_dataset_geo = gpd.GeoDataFrame(df_dataset, crs=gdf_edges.crs, geometry=df_dataset.geometry)
fig, ax = plt.subplots(figsize=(30, 30))
df_dataset_geo.plot(ax=ax, color='green', markersize=1)
gdf_edges.plot(ax=ax, cmap='Reds')
ax.set_axis_off()
plt.show()
def plot_normal_path(self, origin, destination):
    """Compute and plot the shortest path between two graph nodes.

    Runs the modified Dijkstra search from ``origin`` to ``destination``,
    writes the path's edge endpoints to ``../train_dataset/normal_path.csv``
    and draws the path (black) on top of the network edges coloured by
    their ``arcTime`` attribute. Returns None; shows a matplotlib figure.

    :param origin: start node id understood by ``modified_dijkstras``.
    :param destination: end node id.
    """
    # Node list maps node id -> (longitude, latitude); produced elsewhere.
    df_nodelist = pd.read_csv('../train_dataset/normalnodelist.csv')
    # Shortest path via the modified Dijkstra implementation.
    traveltime, path = self.modified_dijkstras(origin, destination)
    print(f'起点:{origin},终点:{destination},行程时间:{traveltime}')
    # Convert the networkx graph into GeoDataFrames for plotting.
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(self.G)
    os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)
    path_file = os.path.join('..', 'train_dataset', 'normal_path.csv')
    # Attach coordinates to each node's attribute dict.
    # BUG FIX: the original iterated over an undefined global ``G``;
    # it must be ``self.G`` so the attributes land on the graph we plot.
    # Also: do the (costly) dataframe lookup once per node, not twice.
    for u, d in self.G.nodes(data=True):
        row = df_nodelist[df_nodelist.node == u].values.squeeze()
        d['longitude'] = row[1]
        d['latitude'] = row[2]
    # A CSV can only hold plain text, so write the edge endpoints explicitly.
    with open(path_file, 'w') as f:
        f.write('index,node1_x,node1_y,node2_x,node2_y\n')
        for i in range(len(path) - 1):
            u, v = path[i], path[i + 1]
            f.write('{0},{1},{2},{3},{4}\n'.format(
                i,
                self.G.nodes[u]['longitude'], self.G.nodes[u]['latitude'],
                self.G.nodes[v]['longitude'], self.G.nodes[v]['latitude']))
    # Re-read the CSV and build LineString geometries for every path edge.
    path_edges = pd.read_csv(path_file)
    path_edges['geometry'] = path_edges.apply(
        lambda row: LineString([(row.node1_x, row.node1_y), (row.node2_x, row.node2_y)]), axis=1)
    # '{"init": "epsg:4326"}' dicts are deprecated in pyproj >= 2;
    # the authority string is the supported spelling of the same CRS.
    gdf_path_edges = gpd.GeoDataFrame(path_edges, crs='EPSG:4326', geometry=path_edges.geometry)
    # Layered plot: network edges coloured by travel time, path in black.
    fig, ax = plt.subplots(figsize=(10, 10))
    gdf_edges.plot(ax=ax, column='arcTime', cmap='RdYlGn')
    gdf_path_edges.plot(ax=ax, color='black', categorical=True)
    ax.set_axis_off()
    plt.show()
def plot_real_path(self, o_lng, o_lat, d_lng, d_lat):
    """Snap raw pickup/dropoff coordinates to the road network and plot
    the shortest path between the snapped nodes.

    Writes the path's edge endpoints to ``../train_dataset/real_path.csv``
    and draws the path (black) over all network edges (red).
    Returns None; shows a matplotlib figure.

    :param o_lng: pickup (origin) longitude.
    :param o_lat: pickup (origin) latitude.
    :param d_lng: dropoff (destination) longitude.
    :param d_lat: dropoff (destination) latitude.
    """
    # Rebuild the graph on demand when manual mode is enabled
    # (idiom fix: ``if self.manual:`` instead of ``== True``).
    if self.manual:
        self.G = self.Graph()
    # Snap both endpoints to their nearest graph node.
    # The original also computed the snapped coordinates and packed them
    # into tuples that were never used; those dead locals are removed.
    pickup_osmid = self.project(self.G, o_lng, o_lat)[0]
    dropoff_osmid = self.project(self.G, d_lng, d_lat)[0]
    # Shortest path via the modified Dijkstra implementation.
    traveltime, path = self.modified_dijkstras(pickup_osmid, dropoff_osmid)
    print(f'起点:{o_lng},{o_lat},终点:{d_lng},{d_lat},行程时间:{traveltime}')
    # Convert the networkx graph into GeoDataFrames for plotting.
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(self.G)
    os.makedirs(os.path.join('..', 'train_dataset'), exist_ok=True)
    path_file = os.path.join('..', 'train_dataset', 'real_path.csv')
    # A CSV can only hold plain text, so write the edge endpoints explicitly.
    with open(path_file, 'w') as f:
        f.write('index,node1_x,node1_y,node2_x,node2_y\n')
        for i in range(len(path) - 1):
            u, v = path[i], path[i + 1]
            f.write('{0},{1},{2},{3},{4}\n'.format(
                i, self.G.nodes[u]['x'], self.G.nodes[u]['y'],
                self.G.nodes[v]['x'], self.G.nodes[v]['y']))
    # Re-read the CSV and build LineString geometries for every path edge.
    path_edges = pd.read_csv(path_file)
    path_edges['geometry'] = path_edges.apply(
        lambda row: LineString([(row.node1_x, row.node1_y), (row.node2_x, row.node2_y)]), axis=1)
    # '{"init": "epsg:4326"}' dicts are deprecated in pyproj >= 2;
    # the authority string is the supported spelling of the same CRS.
    gdf_path_edges = gpd.GeoDataFrame(path_edges, crs='EPSG:4326', geometry=path_edges.geometry)
    # Layered plot: all network edges in red, the found path in black.
    fig, ax = plt.subplots(figsize=(20, 20))
    gdf_edges.plot(ax=ax, color="red", categorical=True)
    gdf_path_edges.plot(ax=ax, color='black', categorical=True)
    ax.set_axis_off()
    plt.show()
| 48.804786
| 123
| 0.48631
| 8,550
| 77,502
| 4.224561
| 0.062573
| 0.029014
| 0.026993
| 0.037375
| 0.895487
| 0.880537
| 0.854817
| 0.83768
| 0.828931
| 0.821844
| 0
| 0.040643
| 0.382519
| 77,502
| 1,588
| 124
| 48.804786
| 0.714122
| 0.080114
| 0
| 0.783601
| 0
| 0
| 0.10942
| 0.035915
| 0.010989
| 0
| 0
| 0
| 0
| 1
| 0.024514
| false
| 0.001691
| 0.016906
| 0
| 0.071851
| 0.009298
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3d166f509b55cfac49c35a52150d573ade6c064
| 242
|
py
|
Python
|
discorgeous/__init__.py
|
Xtansia/Discorgeous
|
6b84a39d5d48c1eb1a74cbc32959e5e09b47a93b
|
[
"MIT"
] | 6
|
2018-10-14T00:39:15.000Z
|
2021-08-07T23:49:23.000Z
|
discorgeous/__init__.py
|
Xtansia/Discorgeous
|
6b84a39d5d48c1eb1a74cbc32959e5e09b47a93b
|
[
"MIT"
] | 2
|
2018-10-02T15:18:39.000Z
|
2021-06-01T22:49:27.000Z
|
discorgeous/__init__.py
|
Xtansia/Discorgeous
|
6b84a39d5d48c1eb1a74cbc32959e5e09b47a93b
|
[
"MIT"
] | 2
|
2020-03-17T05:56:51.000Z
|
2021-03-26T18:50:21.000Z
|
# Public package surface: re-export the three Configuration flavours under
# distinct names, plus the Client entry point, so callers can import them
# directly from the package root.
from .configuration.general import Configuration as GeneralConfiguration
from .configuration.client import Configuration as ClientConfiguration
from .configuration.server import Configuration as ServerConfiguration
from .client import Client
| 48.4
| 72
| 0.880165
| 25
| 242
| 8.52
| 0.4
| 0.239437
| 0.295775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 242
| 4
| 73
| 60.5
| 0.968182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d3fb48eb6a1ad601d6c0cff9d165c9962fa0d5d0
| 32,367
|
py
|
Python
|
Exp_Main/migrations/0001_squashed_0053_auto_20210427_1647.py
|
SimonSchubotz/Electronic-Laboratory-Notebook
|
a5dc3daa76b07370c1ee5b7e74fb6c780c3d3c97
|
[
"Apache-2.0"
] | null | null | null |
Exp_Main/migrations/0001_squashed_0053_auto_20210427_1647.py
|
SimonSchubotz/Electronic-Laboratory-Notebook
|
a5dc3daa76b07370c1ee5b7e74fb6c780c3d3c97
|
[
"Apache-2.0"
] | null | null | null |
Exp_Main/migrations/0001_squashed_0053_auto_20210427_1647.py
|
SimonSchubotz/Electronic-Laboratory-Notebook
|
a5dc3daa76b07370c1ee5b7e74fb6c780c3d3c97
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.3 on 2021-05-04 20:37
import datetime
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
replaces = [('Exp_Main', '0001_initial'), ('Exp_Main', '0002_auto_20200716_1159'), ('Exp_Main', '0003_auto_20200716_1201'), ('Exp_Main', '0004_auto_20200716_1321'), ('Exp_Main', '0005_auto_20200716_1437'), ('Exp_Main', '0006_auto_20200716_1438'), ('Exp_Main', '0007_auto_20200716_1440'), ('Exp_Main', '0008_auto_20200716_1502'), ('Exp_Main', '0009_auto_20200716_1524'), ('Exp_Main', '0010_auto_20200716_1539'), ('Exp_Main', '0011_auto_20200716_1542'), ('Exp_Main', '0012_con_dip_hed_hev_kur_lqb_naf_nel_rld_sem_sfg'), ('Exp_Main', '0013_oca_sub_exp'), ('Exp_Main', '0014_auto_20200805_1249'), ('Exp_Main', '0015_auto_20200806_1139'), ('Exp_Main', '0016_auto_20200817_2027'), ('Exp_Main', '0017_sel'), ('Exp_Main', '0018_sel_sub_exp'), ('Exp_Main', '0019_auto_20200826_1218'), ('Exp_Main', '0020_auto_20200826_1300'), ('Exp_Main', '0021_auto_20200826_2108'), ('Exp_Main', '0022_sel_dash'), ('Exp_Main', '0023_auto_20200901_2129'), ('Exp_Main', '0024_sfg_dash'), ('Exp_Main', '0025_auto_20200901_2324'), ('Exp_Main', '0026_auto_20200918_1347'), ('Exp_Main', '0027_con_temp_mixing_ratio'), ('Exp_Main', '0028_auto_20200921_1830'), ('Exp_Main', '0029_auto_20200921_1847'), ('Exp_Main', '0030_auto_20200921_1848'), ('Exp_Main', '0031_remove_expbase_exp_type'), ('Exp_Main', '0032_auto_20201104_1900'), ('Exp_Main', '0033_group'), ('Exp_Main', '0034_auto_20201106_1546'), ('Exp_Main', '0035_auto_20201106_1554'), ('Exp_Main', '0036_auto_20201106_1618'), ('Exp_Main', '0037_remove_group_collapsed'), ('Exp_Main', '0038_group_dash'), ('Exp_Main', '0039_auto_20201117_0950'), ('Exp_Main', '0040_grp'), ('Exp_Main', '0041_comparison'), ('Exp_Main', '0042_auto_20201129_1332'), ('Exp_Main', '0043_sld'), ('Exp_Main', '0044_expbase_project'), ('Exp_Main', '0045_analysis_result_fit_result_oscillating_drop_analisis'), ('Exp_Main', '0046_auto_20210306_2222'), ('Exp_Main', '0047_auto_20210307_1534'), ('Exp_Main', '0048_exppath_pathprocesseddata'), ('Exp_Main', '0049_oca_link_osz_join_lsp'), ('Exp_Main', 
'0050_auto_20210310_1908'), ('Exp_Main', '0051_auto_20210311_1606'), ('Exp_Main', '0052_auto_20210322_1601'), ('Exp_Main', '0053_auto_20210427_1647')]
initial = True
dependencies = [
('Exp_Sub', '0001_squashed_0072_auto_20210427_1647'),
('Lab_Dash', '0001_squashed_0062_auto_20210427_1647'),
('Lab_Misc', '0001_squashed_0009_samplebase_comment'),
]
operations = [
migrations.CreateModel(
name='ExpPath',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Abbrev', models.CharField(max_length=3, unique=True)),
('Name', models.TextField(unique=True)),
('Path', models.TextField(unique=True)),
('level', models.PositiveIntegerField(default=0, editable=False)),
('lft', models.PositiveIntegerField(default=1, editable=False)),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='Exp_Main.exppath')),
('rght', models.PositiveIntegerField(default=2, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, default=1, editable=False)),
('File_ending', models.ManyToManyField(blank=True, to='Exp_Main.FileEnding')),
],
),
migrations.CreateModel(
name='FileEnding',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Ending', models.TextField(unique=True)),
('Comment', models.TextField(blank=True, null=True)),
('Name', models.TextField(blank=True, null=True, unique=True)),
],
),
migrations.CreateModel(
name='DIP',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='HED',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link_Video', models.TextField(blank=True, null=True)),
('Link', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='HEV',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='KUR',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
('Dash', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.oca')),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='LQB',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='NAF',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link_Video', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='NEL',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link_Video', models.TextField(blank=True, null=True)),
('Link', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='OCA',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
('Dash', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.oca')),
('Sub_Exp', models.ManyToManyField(blank=True, to='Exp_Sub.ExpBase')),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='RLD',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
('t1', models.TextField(blank=True, null=True)),
('t2', models.TextField(blank=True, null=True)),
('t3', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='SEL',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link_XLSX', models.TextField(blank=True, null=True)),
('Link', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
('Sub_Exp', models.ManyToManyField(blank=True, to='Exp_Sub.ExpBase')),
('Dash', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.sel')),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='SEM',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='SFG',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link_Video', models.TextField(blank=True, null=True)),
('Link', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Atmosphere_relax', models.TextField(blank=True, null=True)),
('Temp_Flowrate', models.TextField(blank=True, null=True)),
('Temp_Volume', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
('XPos_mm', models.FloatField(blank=True, null=True)),
('YPos_mm', models.FloatField(blank=True, null=True)),
('Dash', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.sfg')),
('Measurement_Mode', models.CharField(blank=True, choices=[('1', 'External Reflection'), ('2', 'Internal Reflection')], max_length=1, null=True)),
('Polarization', models.CharField(blank=True, choices=[('1', 'PPP'), ('2', 'SSP')], max_length=1, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='Observation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField(blank=True, db_column='Name:', null=True, unique=True)),
('Description', models.TextField(blank=True, db_column='Description:', null=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ObservationHierarchy',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField(blank=True, db_column='Name:', null=True, unique=True)),
('Description', models.TextField(blank=True, db_column='Description:', null=True)),
('lft', models.PositiveIntegerField(editable=False)),
('rght', models.PositiveIntegerField(editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(editable=False)),
('Observation', models.ManyToManyField(blank=True, to='Exp_Main.Observation')),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='Exp_Main.observationhierarchy')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='CON',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='ExpType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField(blank=True, db_column='Name:', null=True, unique=True)),
('Description', models.TextField(blank=True, db_column='Description:', null=True)),
],
),
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField(blank=True, db_column='Name:', null=True, unique=True)),
('Description', models.TextField(blank=True, db_column='Description:', null=True)),
('lft', models.PositiveIntegerField(editable=False)),
('rght', models.PositiveIntegerField(editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(editable=False)),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='Exp_Main.group')),
('ExpBase', models.ManyToManyField(blank=True, to='Exp_Main.ExpBase')),
('Dash', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.grp')),
('Comment', models.TextField(blank=True, null=True)),
('Date_time', models.DateTimeField(blank=True, default=datetime.datetime(1900, 1, 1, 0, 0))),
('Device', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.exppath')),
('Observation', models.ManyToManyField(blank=True, to='Exp_Main.Observation')),
('Sample_name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Misc.samplebase')),
('Type', models.ManyToManyField(blank=True, to='Exp_Main.ExpType')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='GRP',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Group', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.group')),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='Comparison',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField(blank=True, null=True, unique=True)),
('ExpBase', models.ManyToManyField(blank=True, to='Exp_Main.ExpBase')),
('Dash', models.ForeignKey(blank=True, default=1, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.comparison')),
],
),
migrations.CreateModel(
name='SLD',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Temp_Observation', models.TextField(blank=True, null=True)),
('Temp_Hypothesis', models.TextField(blank=True, null=True)),
('Temp_Mixing_ratio', models.TextField(blank=True, null=True)),
('Temp_Buzz_word', models.TextField(blank=True, null=True)),
('Temp_Bath_time', models.TextField(blank=True, null=True)),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='ExpBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Sample_name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Lab_Misc.samplebase')),
('Name', models.TextField(blank=True, null=True, unique=True)),
('Date_time', models.DateTimeField(blank=True, default=datetime.datetime(1900, 1, 1, 0, 0))),
('Device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.exppath')),
('Comment', models.TextField(blank=True, null=True)),
('Observation', models.ManyToManyField(blank=True, to='Exp_Main.Observation')),
('Type', models.ManyToManyField(blank=True, to='Exp_Main.ExpType')),
('Project', models.ManyToManyField(blank=True, to='Lab_Misc.ProjectEntry')),
],
),
migrations.CreateModel(
name='Analysis_result',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Drop_Nr', models.IntegerField(blank=True, null=True)),
('LoR_CL', models.TextField(blank=True, choices=[('Left', 'Left'), ('Right', 'Right')], null=True)),
('Max_CL', models.FloatField(blank=True, null=True)),
('Max_CA', models.FloatField(blank=True, null=True)),
('Min_CA', models.FloatField(blank=True, null=True)),
('Min_AdvCA', models.FloatField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Fit_result',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Drop_Nr', models.IntegerField(blank=True, null=True)),
('LoR_CL', models.TextField(blank=True, choices=[('Left', 'Left'), ('Right', 'Right')], null=True)),
('ErroVal', models.TextField(blank=True, choices=[('Value', 'Value'), ('Error', 'Error')], null=True)),
('x_pos', models.FloatField(blank=True, null=True)),
('y_pos', models.FloatField(blank=True, null=True)),
('Step_width', models.FloatField(blank=True, null=True)),
('Step_hight', models.FloatField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Oscillating_Drop_Analisis',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField(unique=True)),
('Drop_center', models.FloatField(blank=True, null=True)),
('Analysis_result', models.ManyToManyField(blank=True, to='Exp_Main.Analysis_result')),
('Exp', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.expbase')),
('Fit_result', models.ManyToManyField(blank=True, to='Exp_Main.Fit_result')),
],
),
migrations.SeparateDatabaseAndState(
database_operations=[
migrations.AlterModelTable(
name='Comparison',
table='Analysis_comparison',
),
],
state_operations=[
migrations.DeleteModel(
name='Comparison',
),
],
),
migrations.RemoveField(
model_name='oscillating_drop_analisis',
name='Analysis_result',
),
migrations.RemoveField(
model_name='oscillating_drop_analisis',
name='Exp',
),
migrations.RemoveField(
model_name='oscillating_drop_analisis',
name='Fit_result',
),
migrations.DeleteModel(
name='Analysis_result',
),
migrations.DeleteModel(
name='Fit_result',
),
migrations.DeleteModel(
name='Oscillating_Drop_Analisis',
),
migrations.AddField(
model_name='exppath',
name='PathProcessedData',
field=models.TextField(blank=True, null=True, unique=True),
),
migrations.AddField(
model_name='oca',
name='Link_Osz_join_LSP',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='hed',
name='Liquid',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.liquid'),
),
migrations.AddField(
model_name='oca',
name='Liquid',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.liquid'),
),
migrations.CreateModel(
name='LPT',
fields=[
('expbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='Exp_Main.expbase')),
('Link', models.TextField(blank=True, null=True)),
('Link_Data', models.TextField(blank=True, null=True)),
('Link_PDF', models.TextField(blank=True, null=True)),
('Dash', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Lab_Dash.oca')),
('Liquid', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Exp_Main.liquid')),
('Sub_Exp', models.ManyToManyField(blank=True, to='Exp_Sub.ExpBase')),
],
bases=('Exp_Main.expbase',),
),
migrations.CreateModel(
name='Liquid',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.TextField()),
('Born', models.DateTimeField(blank=True, default=datetime.datetime(2021, 4, 27, 16, 47, 33, 263610), null=True)),
('Death', models.DateTimeField(blank=True, default=datetime.datetime(2021, 4, 27, 16, 47, 33, 263610), null=True)),
('Description', models.TextField(blank=True, null=True)),
],
),
]
| 64.093069
| 2,147
| 0.598418
| 3,485
| 32,367
| 5.360689
| 0.087518
| 0.105503
| 0.130125
| 0.170164
| 0.843914
| 0.834172
| 0.808051
| 0.781715
| 0.763676
| 0.727545
| 0
| 0.030822
| 0.246208
| 32,367
| 504
| 2,148
| 64.220238
| 0.734896
| 0.00139
| 0
| 0.800403
| 1
| 0
| 0.188738
| 0.048422
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008065
| 0
| 0.018145
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
31841a41fa7d1dd84376877859d186f6ab58058c
| 60,450
|
py
|
Python
|
sdk/python/pulumi_azure/trafficmanager/endpoint.py
|
ScriptBox99/pulumi-azure
|
1b8c6d5479ccabc39094741eac25a8ca44c8833a
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/trafficmanager/endpoint.py
|
ScriptBox99/pulumi-azure
|
1b8c6d5479ccabc39094741eac25a8ca44c8833a
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/trafficmanager/endpoint.py
|
ScriptBox99/pulumi-azure
|
1b8c6d5479ccabc39094741eac25a8ca44c8833a
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['EndpointArgs', 'Endpoint']
@pulumi.input_type
class EndpointArgs:
    """Constructor arguments for the `Endpoint` resource.

    `profile_name`, `resource_group_name` and `type` are required; all other
    inputs are optional and are only registered on the input map when a value
    is actually supplied.  (Generated code — do not edit by hand.)
    """
    def __init__(__self__, *,
                 profile_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 custom_headers: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]]] = None,
                 endpoint_location: Optional[pulumi.Input[str]] = None,
                 endpoint_status: Optional[pulumi.Input[str]] = None,
                 geo_mappings: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 min_child_endpoints: Optional[pulumi.Input[int]] = None,
                 minimum_required_child_endpoints_ipv4: Optional[pulumi.Input[int]] = None,
                 minimum_required_child_endpoints_ipv6: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 subnets: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]]] = None,
                 target: Optional[pulumi.Input[str]] = None,
                 target_resource_id: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a Endpoint resource.
        :param pulumi.Input[str] profile_name: The name of the Traffic Manager Profile to which
               this Traffic Manager endpoint should be attached.
        :param pulumi.Input[str] resource_group_name: The name of the resource group where the Traffic Manager Profile exists.
        :param pulumi.Input[str] type: The Endpoint type, must be one of:
               - `azureEndpoints`
               - `externalEndpoints`
               - `nestedEndpoints`
        :param pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]] custom_headers: One or more `custom_header` blocks as defined below
        :param pulumi.Input[str] endpoint_location: Specifies the Azure location of the Endpoint,
               this must be specified for Profiles using the `Performance` routing method
               if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
               For Endpoints of type `azureEndpoints` the value will be taken from the
               location of the Azure target resource.
        :param pulumi.Input[str] endpoint_status: The status of the Endpoint, can be set to
               either `Enabled` or `Disabled`. Defaults to `Enabled`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] geo_mappings: A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
        :param pulumi.Input[int] min_child_endpoints: This argument specifies the minimum number
               of endpoints that must be ‘online’ in the child profile in order for the
               parent profile to direct traffic to any of the endpoints in that child
               profile. This argument only applies to Endpoints of type `nestedEndpoints`
               and has to be larger than `0`.
        :param pulumi.Input[int] minimum_required_child_endpoints_ipv4: This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        :param pulumi.Input[int] minimum_required_child_endpoints_ipv6: This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        :param pulumi.Input[str] name: The name of the Traffic Manager endpoint. Changing this forces a
               new resource to be created.
        :param pulumi.Input[int] priority: Specifies the priority of this Endpoint, this must be
               specified for Profiles using the `Priority` traffic routing method. Supports
               values between 1 and 1000, with no Endpoints sharing the same value. If
               omitted the value will be computed in order of creation.
        :param pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]] subnets: One or more `subnet` blocks as defined below
        :param pulumi.Input[str] target: The FQDN DNS name of the target. This argument must be
               provided for an endpoint of type `externalEndpoints`, for other types it
               will be computed.
        :param pulumi.Input[str] target_resource_id: The resource id of an Azure resource to
               target. This argument must be provided for an endpoint of type
               `azureEndpoints` or `nestedEndpoints`.
        :param pulumi.Input[int] weight: Specifies how much traffic should be distributed to this
               endpoint, this must be specified for Profiles using the `Weighted` traffic
               routing method. Supports values between 1 and 1000.
        """
        # Required inputs are always registered.
        pulumi.set(__self__, "profile_name", profile_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "type", type)
        # Optional inputs are registered only when explicitly provided, so that
        # unset values stay absent from the resource's input map rather than
        # being sent to the provider as None.
        if custom_headers is not None:
            pulumi.set(__self__, "custom_headers", custom_headers)
        if endpoint_location is not None:
            pulumi.set(__self__, "endpoint_location", endpoint_location)
        if endpoint_status is not None:
            pulumi.set(__self__, "endpoint_status", endpoint_status)
        if geo_mappings is not None:
            pulumi.set(__self__, "geo_mappings", geo_mappings)
        if min_child_endpoints is not None:
            pulumi.set(__self__, "min_child_endpoints", min_child_endpoints)
        if minimum_required_child_endpoints_ipv4 is not None:
            pulumi.set(__self__, "minimum_required_child_endpoints_ipv4", minimum_required_child_endpoints_ipv4)
        if minimum_required_child_endpoints_ipv6 is not None:
            pulumi.set(__self__, "minimum_required_child_endpoints_ipv6", minimum_required_child_endpoints_ipv6)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if subnets is not None:
            pulumi.set(__self__, "subnets", subnets)
        if target is not None:
            pulumi.set(__self__, "target", target)
        if target_resource_id is not None:
            pulumi.set(__self__, "target_resource_id", target_resource_id)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)
    @property
    @pulumi.getter(name="profileName")
    def profile_name(self) -> pulumi.Input[str]:
        """
        The name of the Traffic Manager Profile to which this Traffic Manager
        endpoint should be attached.
        """
        return pulumi.get(self, "profile_name")
    @profile_name.setter
    def profile_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "profile_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group where the Traffic Manager Profile exists.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The Endpoint type, must be one of:
        - `azureEndpoints`
        - `externalEndpoints`
        - `nestedEndpoints`
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="customHeaders")
    def custom_headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]]]:
        """
        One or more `custom_header` blocks as defined below
        """
        return pulumi.get(self, "custom_headers")
    @custom_headers.setter
    def custom_headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]]]):
        pulumi.set(self, "custom_headers", value)
    @property
    @pulumi.getter(name="endpointLocation")
    def endpoint_location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the Azure location of the Endpoint,
        this must be specified for Profiles using the `Performance` routing method
        if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
        For Endpoints of type `azureEndpoints` the value will be taken from the
        location of the Azure target resource.
        """
        return pulumi.get(self, "endpoint_location")
    @endpoint_location.setter
    def endpoint_location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_location", value)
    @property
    @pulumi.getter(name="endpointStatus")
    def endpoint_status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of the Endpoint, can be set to
        either `Enabled` or `Disabled`. Defaults to `Enabled`.
        """
        return pulumi.get(self, "endpoint_status")
    @endpoint_status.setter
    def endpoint_status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_status", value)
    @property
    @pulumi.getter(name="geoMappings")
    def geo_mappings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
        """
        return pulumi.get(self, "geo_mappings")
    @geo_mappings.setter
    def geo_mappings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "geo_mappings", value)
    @property
    @pulumi.getter(name="minChildEndpoints")
    def min_child_endpoints(self) -> Optional[pulumi.Input[int]]:
        """
        This argument specifies the minimum number
        of endpoints that must be ‘online’ in the child profile in order for the
        parent profile to direct traffic to any of the endpoints in that child
        profile. This argument only applies to Endpoints of type `nestedEndpoints`
        and has to be larger than `0`.
        """
        return pulumi.get(self, "min_child_endpoints")
    @min_child_endpoints.setter
    def min_child_endpoints(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_child_endpoints", value)
    @property
    @pulumi.getter(name="minimumRequiredChildEndpointsIpv4")
    def minimum_required_child_endpoints_ipv4(self) -> Optional[pulumi.Input[int]]:
        """
        This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        """
        return pulumi.get(self, "minimum_required_child_endpoints_ipv4")
    @minimum_required_child_endpoints_ipv4.setter
    def minimum_required_child_endpoints_ipv4(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "minimum_required_child_endpoints_ipv4", value)
    @property
    @pulumi.getter(name="minimumRequiredChildEndpointsIpv6")
    def minimum_required_child_endpoints_ipv6(self) -> Optional[pulumi.Input[int]]:
        """
        This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        """
        return pulumi.get(self, "minimum_required_child_endpoints_ipv6")
    @minimum_required_child_endpoints_ipv6.setter
    def minimum_required_child_endpoints_ipv6(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "minimum_required_child_endpoints_ipv6", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Traffic Manager endpoint. Changing this forces a
        new resource to be created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the priority of this Endpoint, this must be
        specified for Profiles using the `Priority` traffic routing method. Supports
        values between 1 and 1000, with no Endpoints sharing the same value. If
        omitted the value will be computed in order of creation.
        """
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter
    def subnets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]]]:
        """
        One or more `subnet` blocks as defined below
        """
        return pulumi.get(self, "subnets")
    @subnets.setter
    def subnets(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]]]):
        pulumi.set(self, "subnets", value)
    @property
    @pulumi.getter
    def target(self) -> Optional[pulumi.Input[str]]:
        """
        The FQDN DNS name of the target. This argument must be
        provided for an endpoint of type `externalEndpoints`, for other types it
        will be computed.
        """
        return pulumi.get(self, "target")
    @target.setter
    def target(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target", value)
    @property
    @pulumi.getter(name="targetResourceId")
    def target_resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        The resource id of an Azure resource to
        target. This argument must be provided for an endpoint of type
        `azureEndpoints` or `nestedEndpoints`.
        """
        return pulumi.get(self, "target_resource_id")
    @target_resource_id.setter
    def target_resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_resource_id", value)
    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies how much traffic should be distributed to this
        endpoint, this must be specified for Profiles using the `Weighted` traffic
        routing method. Supports values between 1 and 1000.
        """
        return pulumi.get(self, "weight")
    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class _EndpointState:
    """State-lookup inputs for `Endpoint`.

    Mirrors `EndpointArgs` but with every field optional (used for filtering /
    importing existing resources), plus the provider-reported
    `endpoint_monitor_status`.  (Generated code — do not edit by hand.)
    """
    def __init__(__self__, *,
                 custom_headers: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]]] = None,
                 endpoint_location: Optional[pulumi.Input[str]] = None,
                 endpoint_monitor_status: Optional[pulumi.Input[str]] = None,
                 endpoint_status: Optional[pulumi.Input[str]] = None,
                 geo_mappings: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 min_child_endpoints: Optional[pulumi.Input[int]] = None,
                 minimum_required_child_endpoints_ipv4: Optional[pulumi.Input[int]] = None,
                 minimum_required_child_endpoints_ipv6: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 profile_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 subnets: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]]] = None,
                 target: Optional[pulumi.Input[str]] = None,
                 target_resource_id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering Endpoint resources.
        :param pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]] custom_headers: One or more `custom_header` blocks as defined below
        :param pulumi.Input[str] endpoint_location: Specifies the Azure location of the Endpoint,
               this must be specified for Profiles using the `Performance` routing method
               if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
               For Endpoints of type `azureEndpoints` the value will be taken from the
               location of the Azure target resource.
        :param pulumi.Input[str] endpoint_status: The status of the Endpoint, can be set to
               either `Enabled` or `Disabled`. Defaults to `Enabled`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] geo_mappings: A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
        :param pulumi.Input[int] min_child_endpoints: This argument specifies the minimum number
               of endpoints that must be ‘online’ in the child profile in order for the
               parent profile to direct traffic to any of the endpoints in that child
               profile. This argument only applies to Endpoints of type `nestedEndpoints`
               and has to be larger than `0`.
        :param pulumi.Input[int] minimum_required_child_endpoints_ipv4: This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        :param pulumi.Input[int] minimum_required_child_endpoints_ipv6: This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        :param pulumi.Input[str] name: The name of the Traffic Manager endpoint. Changing this forces a
               new resource to be created.
        :param pulumi.Input[int] priority: Specifies the priority of this Endpoint, this must be
               specified for Profiles using the `Priority` traffic routing method. Supports
               values between 1 and 1000, with no Endpoints sharing the same value. If
               omitted the value will be computed in order of creation.
        :param pulumi.Input[str] profile_name: The name of the Traffic Manager Profile to which
               this Traffic Manager endpoint should be attached.
        :param pulumi.Input[str] resource_group_name: The name of the resource group where the Traffic Manager Profile exists.
        :param pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]] subnets: One or more `subnet` blocks as defined below
        :param pulumi.Input[str] target: The FQDN DNS name of the target. This argument must be
               provided for an endpoint of type `externalEndpoints`, for other types it
               will be computed.
        :param pulumi.Input[str] target_resource_id: The resource id of an Azure resource to
               target. This argument must be provided for an endpoint of type
               `azureEndpoints` or `nestedEndpoints`.
        :param pulumi.Input[str] type: The Endpoint type, must be one of:
               - `azureEndpoints`
               - `externalEndpoints`
               - `nestedEndpoints`
        :param pulumi.Input[int] weight: Specifies how much traffic should be distributed to this
               endpoint, this must be specified for Profiles using the `Weighted` traffic
               routing method. Supports values between 1 and 1000.
        """
        # Every field is optional: only explicitly-supplied values are
        # registered, so unset fields do not participate in the lookup/filter.
        if custom_headers is not None:
            pulumi.set(__self__, "custom_headers", custom_headers)
        if endpoint_location is not None:
            pulumi.set(__self__, "endpoint_location", endpoint_location)
        if endpoint_monitor_status is not None:
            pulumi.set(__self__, "endpoint_monitor_status", endpoint_monitor_status)
        if endpoint_status is not None:
            pulumi.set(__self__, "endpoint_status", endpoint_status)
        if geo_mappings is not None:
            pulumi.set(__self__, "geo_mappings", geo_mappings)
        if min_child_endpoints is not None:
            pulumi.set(__self__, "min_child_endpoints", min_child_endpoints)
        if minimum_required_child_endpoints_ipv4 is not None:
            pulumi.set(__self__, "minimum_required_child_endpoints_ipv4", minimum_required_child_endpoints_ipv4)
        if minimum_required_child_endpoints_ipv6 is not None:
            pulumi.set(__self__, "minimum_required_child_endpoints_ipv6", minimum_required_child_endpoints_ipv6)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if profile_name is not None:
            pulumi.set(__self__, "profile_name", profile_name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if subnets is not None:
            pulumi.set(__self__, "subnets", subnets)
        if target is not None:
            pulumi.set(__self__, "target", target)
        if target_resource_id is not None:
            pulumi.set(__self__, "target_resource_id", target_resource_id)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)
    @property
    @pulumi.getter(name="customHeaders")
    def custom_headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]]]:
        """
        One or more `custom_header` blocks as defined below
        """
        return pulumi.get(self, "custom_headers")
    @custom_headers.setter
    def custom_headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointCustomHeaderArgs']]]]):
        pulumi.set(self, "custom_headers", value)
    @property
    @pulumi.getter(name="endpointLocation")
    def endpoint_location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the Azure location of the Endpoint,
        this must be specified for Profiles using the `Performance` routing method
        if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
        For Endpoints of type `azureEndpoints` the value will be taken from the
        location of the Azure target resource.
        """
        return pulumi.get(self, "endpoint_location")
    @endpoint_location.setter
    def endpoint_location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_location", value)
    @property
    @pulumi.getter(name="endpointMonitorStatus")
    def endpoint_monitor_status(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): not documented in the provider schema; presumably the
        # monitoring status reported back by Azure (output-only) — confirm
        # against the Traffic Manager REST API before relying on it.
        return pulumi.get(self, "endpoint_monitor_status")
    @endpoint_monitor_status.setter
    def endpoint_monitor_status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_monitor_status", value)
    @property
    @pulumi.getter(name="endpointStatus")
    def endpoint_status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of the Endpoint, can be set to
        either `Enabled` or `Disabled`. Defaults to `Enabled`.
        """
        return pulumi.get(self, "endpoint_status")
    @endpoint_status.setter
    def endpoint_status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_status", value)
    @property
    @pulumi.getter(name="geoMappings")
    def geo_mappings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
        """
        return pulumi.get(self, "geo_mappings")
    @geo_mappings.setter
    def geo_mappings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "geo_mappings", value)
    @property
    @pulumi.getter(name="minChildEndpoints")
    def min_child_endpoints(self) -> Optional[pulumi.Input[int]]:
        """
        This argument specifies the minimum number
        of endpoints that must be ‘online’ in the child profile in order for the
        parent profile to direct traffic to any of the endpoints in that child
        profile. This argument only applies to Endpoints of type `nestedEndpoints`
        and has to be larger than `0`.
        """
        return pulumi.get(self, "min_child_endpoints")
    @min_child_endpoints.setter
    def min_child_endpoints(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_child_endpoints", value)
    @property
    @pulumi.getter(name="minimumRequiredChildEndpointsIpv4")
    def minimum_required_child_endpoints_ipv4(self) -> Optional[pulumi.Input[int]]:
        """
        This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        """
        return pulumi.get(self, "minimum_required_child_endpoints_ipv4")
    @minimum_required_child_endpoints_ipv4.setter
    def minimum_required_child_endpoints_ipv4(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "minimum_required_child_endpoints_ipv4", value)
    @property
    @pulumi.getter(name="minimumRequiredChildEndpointsIpv6")
    def minimum_required_child_endpoints_ipv6(self) -> Optional[pulumi.Input[int]]:
        """
        This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
        """
        return pulumi.get(self, "minimum_required_child_endpoints_ipv6")
    @minimum_required_child_endpoints_ipv6.setter
    def minimum_required_child_endpoints_ipv6(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "minimum_required_child_endpoints_ipv6", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Traffic Manager endpoint. Changing this forces a
        new resource to be created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the priority of this Endpoint, this must be
        specified for Profiles using the `Priority` traffic routing method. Supports
        values between 1 and 1000, with no Endpoints sharing the same value. If
        omitted the value will be computed in order of creation.
        """
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter(name="profileName")
    def profile_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Traffic Manager Profile to which this Traffic Manager
        endpoint should be attached.
        """
        return pulumi.get(self, "profile_name")
    @profile_name.setter
    def profile_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "profile_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group where the Traffic Manager Profile exists.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter
    def subnets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]]]:
        """
        One or more `subnet` blocks as defined below
        """
        return pulumi.get(self, "subnets")
    @subnets.setter
    def subnets(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointSubnetArgs']]]]):
        pulumi.set(self, "subnets", value)
    @property
    @pulumi.getter
    def target(self) -> Optional[pulumi.Input[str]]:
        """
        The FQDN DNS name of the target. This argument must be
        provided for an endpoint of type `externalEndpoints`, for other types it
        will be computed.
        """
        return pulumi.get(self, "target")
    @target.setter
    def target(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target", value)
    @property
    @pulumi.getter(name="targetResourceId")
    def target_resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        The resource id of an Azure resource to
        target. This argument must be provided for an endpoint of type
        `azureEndpoints` or `nestedEndpoints`.
        """
        return pulumi.get(self, "target_resource_id")
    @target_resource_id.setter
    def target_resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_resource_id", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The Endpoint type, must be one of:
        - `azureEndpoints`
        - `externalEndpoints`
        - `nestedEndpoints`
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies how much traffic should be distributed to this
        endpoint, this must be specified for Profiles using the `Weighted` traffic
        routing method. Supports values between 1 and 1000.
        """
        return pulumi.get(self, "weight")
    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
# Emits a DeprecationWarning at import time: this module has been superseded
# by azure.network.TrafficManagerEndpoint.
warnings.warn("""azure.trafficmanager.Endpoint has been deprecated in favor of azure.network.TrafficManagerEndpoint""", DeprecationWarning)
class Endpoint(pulumi.CustomResource):
warnings.warn("""azure.trafficmanager.Endpoint has been deprecated in favor of azure.network.TrafficManagerEndpoint""", DeprecationWarning)
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
custom_headers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointCustomHeaderArgs']]]]] = None,
endpoint_location: Optional[pulumi.Input[str]] = None,
endpoint_status: Optional[pulumi.Input[str]] = None,
geo_mappings: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
min_child_endpoints: Optional[pulumi.Input[int]] = None,
minimum_required_child_endpoints_ipv4: Optional[pulumi.Input[int]] = None,
minimum_required_child_endpoints_ipv6: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
profile_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointSubnetArgs']]]]] = None,
target: Optional[pulumi.Input[str]] = None,
target_resource_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
weight: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
Manages a Traffic Manager Endpoint.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
import pulumi_random as random
server = random.RandomId("server",
keepers={
"azi_id": 1,
},
byte_length=8)
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_traffic_manager_profile = azure.network.TrafficManagerProfile("exampleTrafficManagerProfile",
resource_group_name=example_resource_group.name,
traffic_routing_method="Weighted",
dns_config=azure.network.TrafficManagerProfileDnsConfigArgs(
relative_name=server.hex,
ttl=100,
),
monitor_config=azure.network.TrafficManagerProfileMonitorConfigArgs(
protocol="http",
port=80,
path="/",
interval_in_seconds=30,
timeout_in_seconds=9,
tolerated_number_of_failures=3,
),
tags={
"environment": "Production",
})
example_traffic_manager_endpoint = azure.network.TrafficManagerEndpoint("exampleTrafficManagerEndpoint",
resource_group_name=example_resource_group.name,
profile_name=example_traffic_manager_profile.name,
type="externalEndpoints",
weight=100)
```
## Import
Traffic Manager Endpoints can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:trafficmanager/endpoint:Endpoint exampleEndpoints /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Network/trafficManagerProfiles/mytrafficmanagerprofile1/azureEndpoints/mytrafficmanagerendpoint
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointCustomHeaderArgs']]]] custom_headers: One or more `custom_header` blocks as defined below
:param pulumi.Input[str] endpoint_location: Specifies the Azure location of the Endpoint,
this must be specified for Profiles using the `Performance` routing method
if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
For Endpoints of type `azureEndpoints` the value will be taken from the
location of the Azure target resource.
:param pulumi.Input[str] endpoint_status: The status of the Endpoint, can be set to
either `Enabled` or `Disabled`. Defaults to `Enabled`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] geo_mappings: A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
:param pulumi.Input[int] min_child_endpoints: This argument specifies the minimum number
of endpoints that must be ‘online’ in the child profile in order for the
parent profile to direct traffic to any of the endpoints in that child
profile. This argument only applies to Endpoints of type `nestedEndpoints`
and has to be larger than `0`.
:param pulumi.Input[int] minimum_required_child_endpoints_ipv4: This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
:param pulumi.Input[int] minimum_required_child_endpoints_ipv6: This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
:param pulumi.Input[str] name: The name of the Traffic Manager endpoint. Changing this forces a
new resource to be created.
:param pulumi.Input[int] priority: Specifies the priority of this Endpoint, this must be
specified for Profiles using the `Priority` traffic routing method. Supports
values between 1 and 1000, with no Endpoints sharing the same value. If
omitted the value will be computed in order of creation.
:param pulumi.Input[str] profile_name: The name of the Traffic Manager Profile to attach
create the Traffic Manager endpoint.
:param pulumi.Input[str] resource_group_name: The name of the resource group where the Traffic Manager Profile exists.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointSubnetArgs']]]] subnets: One or more `subnet` blocks as defined below
:param pulumi.Input[str] target: The FQDN DNS name of the target. This argument must be
provided for an endpoint of type `externalEndpoints`, for other types it
will be computed.
:param pulumi.Input[str] target_resource_id: The resource id of an Azure resource to
target. This argument must be provided for an endpoint of type
`azureEndpoints` or `nestedEndpoints`.
:param pulumi.Input[str] type: The Endpoint type, must be one of:
- `azureEndpoints`
- `externalEndpoints`
- `nestedEndpoints`
:param pulumi.Input[int] weight: Specifies how much traffic should be distributed to this
endpoint, this must be specified for Profiles using the `Weighted` traffic
routing method. Supports values between 1 and 1000.
"""
...
@overload
def __init__(__self__,
             resource_name: str,
             args: EndpointArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Manages a Traffic Manager Endpoint.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_azure as azure
    import pulumi_random as random

    server = random.RandomId("server",
        keepers={
            "azi_id": 1,
        },
        byte_length=8)
    example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
    example_traffic_manager_profile = azure.network.TrafficManagerProfile("exampleTrafficManagerProfile",
        resource_group_name=example_resource_group.name,
        traffic_routing_method="Weighted",
        dns_config=azure.network.TrafficManagerProfileDnsConfigArgs(
            relative_name=server.hex,
            ttl=100,
        ),
        monitor_config=azure.network.TrafficManagerProfileMonitorConfigArgs(
            protocol="http",
            port=80,
            path="/",
            interval_in_seconds=30,
            timeout_in_seconds=9,
            tolerated_number_of_failures=3,
        ),
        tags={
            "environment": "Production",
        })
    example_traffic_manager_endpoint = azure.network.TrafficManagerEndpoint("exampleTrafficManagerEndpoint",
        resource_group_name=example_resource_group.name,
        profile_name=example_traffic_manager_profile.name,
        type="externalEndpoints",
        weight=100)
    ```

    ## Import

    Traffic Manager Endpoints can be imported using the `resource id`, e.g.

    ```sh
    $ pulumi import azure:trafficmanager/endpoint:Endpoint exampleEndpoints /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Network/trafficManagerProfiles/mytrafficmanagerprofile1/azureEndpoints/mytrafficmanagerendpoint
    ```

    :param str resource_name: The name of the resource.
    :param EndpointArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch to ``_internal_init`` from either overloaded calling convention."""
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(
        EndpointArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # Plain keyword-argument style: forward the raw arguments untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object style: expand the args object's fields into keywords.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   custom_headers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointCustomHeaderArgs']]]]] = None,
                   endpoint_location: Optional[pulumi.Input[str]] = None,
                   endpoint_status: Optional[pulumi.Input[str]] = None,
                   geo_mappings: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   min_child_endpoints: Optional[pulumi.Input[int]] = None,
                   minimum_required_child_endpoints_ipv4: Optional[pulumi.Input[int]] = None,
                   minimum_required_child_endpoints_ipv6: Optional[pulumi.Input[int]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   priority: Optional[pulumi.Input[int]] = None,
                   profile_name: Optional[pulumi.Input[str]] = None,
                   resource_group_name: Optional[pulumi.Input[str]] = None,
                   subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointSubnetArgs']]]]] = None,
                   target: Optional[pulumi.Input[str]] = None,
                   target_resource_id: Optional[pulumi.Input[str]] = None,
                   type: Optional[pulumi.Input[str]] = None,
                   weight: Optional[pulumi.Input[int]] = None,
                   __props__=None):
    # This resource type is deprecated; warn on every instantiation.
    pulumi.log.warn("""Endpoint is deprecated: azure.trafficmanager.Endpoint has been deprecated in favor of azure.network.TrafficManagerEndpoint""")
    # Normalize and validate the resource options before use.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # No id given -> creating a new resource: build the input property bag.
        # (__props__ is reserved for the get()/lookup path, which supplies an id.)
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = EndpointArgs.__new__(EndpointArgs)
        __props__.__dict__["custom_headers"] = custom_headers
        __props__.__dict__["endpoint_location"] = endpoint_location
        __props__.__dict__["endpoint_status"] = endpoint_status
        __props__.__dict__["geo_mappings"] = geo_mappings
        __props__.__dict__["min_child_endpoints"] = min_child_endpoints
        __props__.__dict__["minimum_required_child_endpoints_ipv4"] = minimum_required_child_endpoints_ipv4
        __props__.__dict__["minimum_required_child_endpoints_ipv6"] = minimum_required_child_endpoints_ipv6
        __props__.__dict__["name"] = name
        __props__.__dict__["priority"] = priority
        # profile_name, resource_group_name and type are required unless the
        # resource is being rehydrated from an existing URN.
        if profile_name is None and not opts.urn:
            raise TypeError("Missing required property 'profile_name'")
        __props__.__dict__["profile_name"] = profile_name
        if resource_group_name is None and not opts.urn:
            raise TypeError("Missing required property 'resource_group_name'")
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["subnets"] = subnets
        __props__.__dict__["target"] = target
        __props__.__dict__["target_resource_id"] = target_resource_id
        if type is None and not opts.urn:
            raise TypeError("Missing required property 'type'")
        __props__.__dict__["type"] = type
        __props__.__dict__["weight"] = weight
        # endpoint_monitor_status is output-only; it is never set at create time.
        __props__.__dict__["endpoint_monitor_status"] = None
    super(Endpoint, __self__).__init__(
        'azure:trafficmanager/endpoint:Endpoint',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        custom_headers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointCustomHeaderArgs']]]]] = None,
        endpoint_location: Optional[pulumi.Input[str]] = None,
        endpoint_monitor_status: Optional[pulumi.Input[str]] = None,
        endpoint_status: Optional[pulumi.Input[str]] = None,
        geo_mappings: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        min_child_endpoints: Optional[pulumi.Input[int]] = None,
        minimum_required_child_endpoints_ipv4: Optional[pulumi.Input[int]] = None,
        minimum_required_child_endpoints_ipv6: Optional[pulumi.Input[int]] = None,
        name: Optional[pulumi.Input[str]] = None,
        priority: Optional[pulumi.Input[int]] = None,
        profile_name: Optional[pulumi.Input[str]] = None,
        resource_group_name: Optional[pulumi.Input[str]] = None,
        subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointSubnetArgs']]]]] = None,
        target: Optional[pulumi.Input[str]] = None,
        target_resource_id: Optional[pulumi.Input[str]] = None,
        type: Optional[pulumi.Input[str]] = None,
        weight: Optional[pulumi.Input[int]] = None) -> 'Endpoint':
    """
    Get an existing Endpoint resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointCustomHeaderArgs']]]] custom_headers: One or more `custom_header` blocks as defined below
    :param pulumi.Input[str] endpoint_location: Specifies the Azure location of the Endpoint,
           this must be specified for Profiles using the `Performance` routing method
           if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
           For Endpoints of type `azureEndpoints` the value will be taken from the
           location of the Azure target resource.
    :param pulumi.Input[str] endpoint_status: The status of the Endpoint, can be set to
           either `Enabled` or `Disabled`. Defaults to `Enabled`.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] geo_mappings: A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
    :param pulumi.Input[int] min_child_endpoints: This argument specifies the minimum number
           of endpoints that must be ‘online’ in the child profile in order for the
           parent profile to direct traffic to any of the endpoints in that child
           profile. This argument only applies to Endpoints of type `nestedEndpoints`
           and has to be larger than `0`.
    :param pulumi.Input[int] minimum_required_child_endpoints_ipv4: This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
    :param pulumi.Input[int] minimum_required_child_endpoints_ipv6: This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
    :param pulumi.Input[str] name: The name of the Traffic Manager endpoint. Changing this forces a
           new resource to be created.
    :param pulumi.Input[int] priority: Specifies the priority of this Endpoint, this must be
           specified for Profiles using the `Priority` traffic routing method. Supports
           values between 1 and 1000, with no Endpoints sharing the same value. If
           omitted the value will be computed in order of creation.
    :param pulumi.Input[str] profile_name: The name of the Traffic Manager Profile to attach
           create the Traffic Manager endpoint.
    :param pulumi.Input[str] resource_group_name: The name of the resource group where the Traffic Manager Profile exists.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EndpointSubnetArgs']]]] subnets: One or more `subnet` blocks as defined below
    :param pulumi.Input[str] target: The FQDN DNS name of the target. This argument must be
           provided for an endpoint of type `externalEndpoints`, for other types it
           will be computed.
    :param pulumi.Input[str] target_resource_id: The resource id of an Azure resource to
           target. This argument must be provided for an endpoint of type
           `azureEndpoints` or `nestedEndpoints`.
    :param pulumi.Input[str] type: The Endpoint type, must be one of:
           - `azureEndpoints`
           - `externalEndpoints`
           - `nestedEndpoints`
    :param pulumi.Input[int] weight: Specifies how much traffic should be distributed to this
           endpoint, this must be specified for Profiles using the `Weighted` traffic
           routing method. Supports values between 1 and 1000.
    """
    # Bind the lookup id into the options so the engine reads existing state
    # instead of creating a new resource.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    # Bypass _EndpointState.__init__ and fill the state bag field-by-field.
    __props__ = _EndpointState.__new__(_EndpointState)
    __props__.__dict__["custom_headers"] = custom_headers
    __props__.__dict__["endpoint_location"] = endpoint_location
    __props__.__dict__["endpoint_monitor_status"] = endpoint_monitor_status
    __props__.__dict__["endpoint_status"] = endpoint_status
    __props__.__dict__["geo_mappings"] = geo_mappings
    __props__.__dict__["min_child_endpoints"] = min_child_endpoints
    __props__.__dict__["minimum_required_child_endpoints_ipv4"] = minimum_required_child_endpoints_ipv4
    __props__.__dict__["minimum_required_child_endpoints_ipv6"] = minimum_required_child_endpoints_ipv6
    __props__.__dict__["name"] = name
    __props__.__dict__["priority"] = priority
    __props__.__dict__["profile_name"] = profile_name
    __props__.__dict__["resource_group_name"] = resource_group_name
    __props__.__dict__["subnets"] = subnets
    __props__.__dict__["target"] = target
    __props__.__dict__["target_resource_id"] = target_resource_id
    __props__.__dict__["type"] = type
    __props__.__dict__["weight"] = weight
    return Endpoint(resource_name, opts=opts, __props__=__props__)
# ---- Output properties (one getter per resource attribute) ----

@property
@pulumi.getter(name="customHeaders")
def custom_headers(self) -> pulumi.Output[Optional[Sequence['outputs.EndpointCustomHeader']]]:
    """
    One or more `custom_header` blocks as defined below
    """
    return pulumi.get(self, "custom_headers")

@property
@pulumi.getter(name="endpointLocation")
def endpoint_location(self) -> pulumi.Output[str]:
    """
    Specifies the Azure location of the Endpoint,
    this must be specified for Profiles using the `Performance` routing method
    if the Endpoint is of either type `nestedEndpoints` or `externalEndpoints`.
    For Endpoints of type `azureEndpoints` the value will be taken from the
    location of the Azure target resource.
    """
    return pulumi.get(self, "endpoint_location")

@property
@pulumi.getter(name="endpointMonitorStatus")
def endpoint_monitor_status(self) -> pulumi.Output[str]:
    """
    The monitoring status of the Endpoint. Output-only: it is never supplied
    at create time (always initialized to `None` in `_internal_init`).
    """
    return pulumi.get(self, "endpoint_monitor_status")

@property
@pulumi.getter(name="endpointStatus")
def endpoint_status(self) -> pulumi.Output[str]:
    """
    The status of the Endpoint, can be set to
    either `Enabled` or `Disabled`. Defaults to `Enabled`.
    """
    return pulumi.get(self, "endpoint_status")

@property
@pulumi.getter(name="geoMappings")
def geo_mappings(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    A list of Geographic Regions used to distribute traffic, such as `WORLD`, `UK` or `DE`. The same location can't be specified in two endpoints. [See the Geographic Hierarchies documentation for more information](https://docs.microsoft.com/en-us/rest/api/trafficmanager/geographichierarchies/getdefault).
    """
    return pulumi.get(self, "geo_mappings")

@property
@pulumi.getter(name="minChildEndpoints")
def min_child_endpoints(self) -> pulumi.Output[Optional[int]]:
    """
    This argument specifies the minimum number
    of endpoints that must be ‘online’ in the child profile in order for the
    parent profile to direct traffic to any of the endpoints in that child
    profile. This argument only applies to Endpoints of type `nestedEndpoints`
    and has to be larger than `0`.
    """
    return pulumi.get(self, "min_child_endpoints")

@property
@pulumi.getter(name="minimumRequiredChildEndpointsIpv4")
def minimum_required_child_endpoints_ipv4(self) -> pulumi.Output[Optional[int]]:
    """
    This argument specifies the minimum number of IPv4 (DNS record type A) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
    """
    return pulumi.get(self, "minimum_required_child_endpoints_ipv4")

@property
@pulumi.getter(name="minimumRequiredChildEndpointsIpv6")
def minimum_required_child_endpoints_ipv6(self) -> pulumi.Output[Optional[int]]:
    """
    This argument specifies the minimum number of IPv6 (DNS record type AAAA) endpoints that must be ‘online’ in the child profile in order for the parent profile to direct traffic to any of the endpoints in that child profile. This argument only applies to Endpoints of type `nestedEndpoints` and defaults to `1`.
    """
    return pulumi.get(self, "minimum_required_child_endpoints_ipv6")

@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    The name of the Traffic Manager endpoint. Changing this forces a
    new resource to be created.
    """
    return pulumi.get(self, "name")

@property
@pulumi.getter
def priority(self) -> pulumi.Output[int]:
    """
    Specifies the priority of this Endpoint, this must be
    specified for Profiles using the `Priority` traffic routing method. Supports
    values between 1 and 1000, with no Endpoints sharing the same value. If
    omitted the value will be computed in order of creation.
    """
    return pulumi.get(self, "priority")

@property
@pulumi.getter(name="profileName")
def profile_name(self) -> pulumi.Output[str]:
    """
    The name of the Traffic Manager Profile to attach
    create the Traffic Manager endpoint.
    """
    return pulumi.get(self, "profile_name")

@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
    """
    The name of the resource group where the Traffic Manager Profile exists.
    """
    return pulumi.get(self, "resource_group_name")

@property
@pulumi.getter
def subnets(self) -> pulumi.Output[Optional[Sequence['outputs.EndpointSubnet']]]:
    """
    One or more `subnet` blocks as defined below
    """
    return pulumi.get(self, "subnets")

@property
@pulumi.getter
def target(self) -> pulumi.Output[str]:
    """
    The FQDN DNS name of the target. This argument must be
    provided for an endpoint of type `externalEndpoints`, for other types it
    will be computed.
    """
    return pulumi.get(self, "target")

@property
@pulumi.getter(name="targetResourceId")
def target_resource_id(self) -> pulumi.Output[Optional[str]]:
    """
    The resource id of an Azure resource to
    target. This argument must be provided for an endpoint of type
    `azureEndpoints` or `nestedEndpoints`.
    """
    return pulumi.get(self, "target_resource_id")

@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
    """
    The Endpoint type, must be one of:
    - `azureEndpoints`
    - `externalEndpoints`
    - `nestedEndpoints`
    """
    return pulumi.get(self, "type")

@property
@pulumi.getter
def weight(self) -> pulumi.Output[int]:
    """
    Specifies how much traffic should be distributed to this
    endpoint, this must be specified for Profiles using the `Weighted` traffic
    routing method. Supports values between 1 and 1000.
    """
    return pulumi.get(self, "weight")
| 53.166227
| 382
| 0.674194
| 7,244
| 60,450
| 5.453341
| 0.044589
| 0.071005
| 0.066854
| 0.037313
| 0.956561
| 0.948917
| 0.93793
| 0.931475
| 0.925172
| 0.913604
| 0
| 0.005875
| 0.239785
| 60,450
| 1,136
| 383
| 53.213028
| 0.853748
| 0.455385
| 0
| 0.831502
| 1
| 0
| 0.134512
| 0.053586
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164835
| false
| 0.001832
| 0.012821
| 0.003663
| 0.276557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3184c69ce1d516c7832445a034a9172678bc183a
| 183
|
py
|
Python
|
Utilities/Templates/Modules/ScriptedSegmentEditorEffect/SegmentEditorTemplateKeyLib/__init__.py
|
forfullstack/slicersources-src
|
91bcecf037a27f3fad4c0ab57e8286fc258bb0f5
|
[
"Apache-2.0"
] | null | null | null |
Utilities/Templates/Modules/ScriptedSegmentEditorEffect/SegmentEditorTemplateKeyLib/__init__.py
|
forfullstack/slicersources-src
|
91bcecf037a27f3fad4c0ab57e8286fc258bb0f5
|
[
"Apache-2.0"
] | null | null | null |
Utilities/Templates/Modules/ScriptedSegmentEditorEffect/SegmentEditorTemplateKeyLib/__init__.py
|
forfullstack/slicersources-src
|
91bcecf037a27f3fad4c0ab57e8286fc258bb0f5
|
[
"Apache-2.0"
] | null | null | null |
from SegmentEditorEffects.AbstractScriptedSegmentEditorEffect import *
from SegmentEditorEffects.AbstractScriptedSegmentEditorLabelEffect import *
from .SegmentEditorEffect import *
| 36.6
| 75
| 0.896175
| 11
| 183
| 14.909091
| 0.545455
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071038
| 183
| 4
| 76
| 45.75
| 0.964706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31870a7e37465cacd2177dd504f66eaf5204c1f3
| 1,237
|
py
|
Python
|
problem008.py
|
kenteroshima/project_euler_answers
|
9af9cd53319b70e8a624353356e2b890e72ab625
|
[
"MIT"
] | null | null | null |
problem008.py
|
kenteroshima/project_euler_answers
|
9af9cd53319b70e8a624353356e2b890e72ab625
|
[
"MIT"
] | null | null | null |
problem008.py
|
kenteroshima/project_euler_answers
|
9af9cd53319b70e8a624353356e2b890e72ab625
|
[
"MIT"
] | null | null | null |
#0.04s user 0.02s system 63% cpu 0.086 total
# Project Euler #8: greatest product of 13 adjacent digits in the 1000-digit number.
numbers = '7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450'
WINDOW = 13
maxnum = 0
# Valid start indices run 0 .. len-13 inclusive; the original
# `range(len(numbers)-13)` skipped the final window (off-by-one).
# Also: the running product is no longer named `sum` (shadowed the builtin).
for start in range(len(numbers) - WINDOW + 1):
    product = 1
    for digit in numbers[start:start + WINDOW]:
        product *= int(digit)
    if product > maxnum:
        maxnum = product
print(maxnum)
| 95.153846
| 1,012
| 0.915117
| 41
| 1,237
| 27.609756
| 0.585366
| 0.012367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.875322
| 0.059822
| 1,237
| 12
| 1,013
| 103.083333
| 0.098022
| 0.034762
| 0
| 0
| 0
| 0
| 0.838223
| 0.838223
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
319f4d1831aa77ed8d80015b092a65f19a98d690
| 83
|
py
|
Python
|
module3/lab3_1_1_3.py
|
dzooli/pcep_prepare
|
ddf34991a2d6ef2cfe3bda706ec333e9caa2aea5
|
[
"MIT"
] | null | null | null |
module3/lab3_1_1_3.py
|
dzooli/pcep_prepare
|
ddf34991a2d6ef2cfe3bda706ec333e9caa2aea5
|
[
"MIT"
] | null | null | null |
module3/lab3_1_1_3.py
|
dzooli/pcep_prepare
|
ddf34991a2d6ef2cfe3bda706ec333e9caa2aea5
|
[
"MIT"
] | null | null | null |
# Compare an int variable against int and float literals; the printed
# results show that == / != work by numeric value across types.
var = 0
outcomes = [var == 0, var == 0.]
var = 1
outcomes.append(var == 0)
outcomes.append(var != 1.1)
for outcome in outcomes:
    print(outcome)
| 11.857143
| 17
| 0.542169
| 17
| 83
| 2.647059
| 0.235294
| 0.355556
| 0.6
| 0.8
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 0.216867
| 83
| 6
| 18
| 13.833333
| 0.584615
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
31d6196aca9451726a19a36ae7e869af9b25bac9
| 6,543
|
py
|
Python
|
background/compute_ranklist.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | 2
|
2017-12-02T13:58:30.000Z
|
2018-08-02T17:07:59.000Z
|
background/compute_ranklist.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | null | null | null |
background/compute_ranklist.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import datetime
PROJECT_ROOT = os.path.realpath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(PROJECT_ROOT, os.pardir))
sys.path.append(os.path.abspath(os.path.join(os.path.abspath(__file__), '../')))
sys.path.append(os.path.abspath(os.path.join(os.path.abspath(__file__), '../push_util')))
sys.path.append(os.path.abspath(os.path.join(os.path.abspath(__file__), '../../')))
sys.path.append(os.path.abspath(os.path.join(os.path.abspath(__file__), '../../..')))
sys.path.append(os.path.abspath(os.path.join(os.path.abspath(__file__), '../../../..')))
from base.settings import load_django_settings
load_django_settings('live_video.base', 'live_video.app')
from app.customer.models.account import *
from app.customer.models.rank import *
from operator import attrgetter
def compute_7_rank_list_first():
    """One-off full rebuild of the rolling 7-day charm/wealth rank lists (top 30 each)."""
    # Drop existing collections: this is a from-scratch rebuild.
    CharmRank.drop_collection()
    WealthRank.drop_collection()
    now_time = datetime.datetime.now()
    # Window: the 7 days ending at 03:00 today (local time).
    end_time = datetime.datetime(now_time.year, now_time.month, now_time.day, 3,0,0)
    start_time = end_time - datetime.timedelta(days=7)
    # Currently charm is only gained when sending gifts.
    charm_record_list = TradeTicketRecord.objects.filter(created_time__gte=start_time, created_time__lt=end_time,
                                                         trade_type=TradeTicketRecord.TradeTypeGift)
    wealth_record_list = TradeDiamondRecord.objects.filter(created_time__gte=start_time, created_time__lt=end_time,
                                                           trade_type=TradeDiamondRecord.TradeTypeGift)
    charm_rank_list = {}
    wealth_rank_list = {}
    # Aggregate per-user totals for the rank lists (charm = tickets/10, wealth = diamonds/10).
    for charm_record in charm_record_list:
        if charm_record.user.id in charm_rank_list:
            charm_rank_list[charm_record.user.id].charm = charm_rank_list[charm_record.user.id].charm +charm_record.ticket/10
        else:
            charm_rank = CharmRank(user=charm_record.user, charm=charm_record.ticket/10)
            charm_rank_list[charm_record.user.id] = charm_rank
    for wealth_record in wealth_record_list:
        if wealth_record.user.id in wealth_rank_list:
            wealth_rank_list[wealth_record.user.id].wealth = wealth_rank_list[wealth_record.user.id].wealth + wealth_record.diamon/10
        else:
            wealth_rank = WealthRank(user=wealth_record.user, wealth=wealth_record.diamon/10)
            wealth_rank_list[wealth_record.user.id] = wealth_rank
    # Sort descending and persist only the top 30 of each list.
    charmlist = charm_rank_list.values()
    wealthlist = wealth_rank_list.values()
    charmlist.sort(key=attrgetter("charm"), reverse=True)
    wealthlist.sort(key=attrgetter("wealth"), reverse=True)
    for i in range(0, len(charmlist)):
        if i > 29:
            break
        # NOTE(review): the two prints below look like leftover debug output.
        print charmlist[i].rank
        charmlist[i].rank = i + 1
        charmlist[i].change_status = 0
        print type(charmlist[i].rank)
        charmlist[i].save()
    for i in range(0, len(wealthlist)):
        if i > 29:
            break
        wealthlist[i].rank = i + 1
        wealthlist[i].change_status = 0
        wealthlist[i].save()
def compute_7_rank_list_delta():
    """Recompute the rolling 7-day rank lists and mark each entry's movement vs. last run.

    change_status: 0 = unchanged, 1 = new entry or moved up, 2 = moved down.
    """
    now_time = datetime.datetime.now()
    # Window: the 7 days ending at 03:00 today (local time).
    end_time = datetime.datetime(now_time.year, now_time.month, now_time.day, 3,0,0)
    start_time = end_time - datetime.timedelta(days=7)
    # Currently charm is only gained when sending gifts.
    charm_record_list = TradeTicketRecord.objects.filter(created_time__gte=start_time, created_time__lt=end_time,
                                                         trade_type=TradeTicketRecord.TradeTypeGift)
    wealth_record_list = TradeDiamondRecord.objects.filter(created_time__gte=start_time, created_time__lt=end_time,
                                                           trade_type=TradeDiamondRecord.TradeTypeGift)
    charm_rank_list = {}
    wealth_rank_list = {}
    # Aggregate per-user totals for the rank lists (charm = tickets/10, wealth = diamonds/10).
    for charm_record in charm_record_list:
        if charm_record.user.id in charm_rank_list:
            charm_rank_list[charm_record.user.id].charm = charm_rank_list[charm_record.user.id].charm +charm_record.ticket/10
        else:
            charm_rank = CharmRank(user=charm_record.user, charm=charm_record.ticket/10)
            charm_rank_list[charm_record.user.id] = charm_rank
    for wealth_record in wealth_record_list:
        if wealth_record.user.id in wealth_rank_list:
            wealth_rank_list[wealth_record.user.id].wealth = wealth_rank_list[wealth_record.user.id].wealth + wealth_record.diamon/10
        else:
            wealth_rank = WealthRank(user=wealth_record.user, wealth=wealth_record.diamon/10)
            wealth_rank_list[wealth_record.user.id] = wealth_rank
    charmlist = charm_rank_list.values()
    wealthlist = wealth_rank_list.values()
    charmlist.sort(key=attrgetter("charm"), reverse=True)
    wealthlist.sort(key=attrgetter("wealth"), reverse=True)
    # Snapshot last run's user -> rank maps before dropping the collections.
    old_charm_user_ids = {}
    old_wealth_user_ids = {}
    old_charm_ranks = CharmRank.objects.all()
    old_wealth_ranks = WealthRank.objects.all()
    for old_charm in old_charm_ranks:
        old_charm_user_ids[old_charm.user.id] = old_charm.rank
    for old_wealth in old_wealth_ranks:
        old_wealth_user_ids[old_wealth.user.id] = old_wealth.rank
    CharmRank.drop_collection()
    WealthRank.drop_collection()
    # NOTE(review): leftover debug output?
    print old_charm_user_ids
    # Persist the top 30, comparing against last run's rank to set change_status.
    for i in range(0, len(charmlist)):
        if i > 29:
            break
        rank = i + 1
        u_id = charmlist[i].user.id
        charmlist[i].rank = rank
        db_charm = charmlist[i].save()
        db_charm.update(set__change_status=0)
        if int(u_id) not in old_charm_user_ids:
            db_charm.update(set__change_status=1)
        else:
            old_rank = old_charm_user_ids[u_id]
            if old_rank > rank:
                db_charm.update(set__change_status=1)
            elif old_rank < rank:
                db_charm.update(set__change_status=2)
    for i in range(0, len(wealthlist)):
        if i > 29:
            break
        rank = i + 1
        wealthlist[i].rank = rank
        u_id = wealthlist[i].user.id
        db_wealth = wealthlist[i].save()
        db_wealth.update(set__change_status=0)
        if u_id not in old_wealth_user_ids:
            db_wealth.update(set__change_status=1)
        else:
            old_rank = old_wealth_user_ids[u_id]
            if old_rank > rank:
                db_wealth.update(set__change_status=1)
            elif old_rank < rank:
                db_wealth.update(set__change_status=2)
# Entry point: the delta variant is the regular job; the commented call is
# the one-off full rebuild.
if __name__ == '__main__':
    # compute_7_rank_list_first()
    compute_7_rank_list_delta()
| 38.715976
| 133
| 0.668806
| 886
| 6,543
| 4.594808
| 0.125282
| 0.055023
| 0.047163
| 0.033407
| 0.818472
| 0.751904
| 0.713338
| 0.706706
| 0.706706
| 0.661017
| 0
| 0.011076
| 0.227266
| 6,543
| 168
| 134
| 38.946429
| 0.794106
| 0.015436
| 0
| 0.609375
| 0
| 0
| 0.015387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.054688
| null | null | 0.023438
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31de7684e609ac4faad4b64471410ca9a761e519
| 30,193
|
py
|
Python
|
regular_language/unit_tests/test_ast_AST_clone_subtree.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | 1
|
2021-12-03T07:20:18.000Z
|
2021-12-03T07:20:18.000Z
|
regular_language/unit_tests/test_ast_AST_clone_subtree.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | null | null | null |
regular_language/unit_tests/test_ast_AST_clone_subtree.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Unit Tests for ast.AST.clone_subtree() """
import unittest
import nlpregex.regular_language.ast
from nlpregex.regular_language.unit_tests.test_ast_helper import test_AST_helper
class test_ast_AST_clone_subtree( unittest.TestCase ):
def __init__( self, *args, **kwargs ):
    """Construct the test case and attach the shared AST test helper."""
    super().__init__(*args, **kwargs)
    self.helper = test_AST_helper()
def construct_ast_from_spec( self, spec01 ):
    """Build an AST from a textual tree spec (delegates to the helper)."""
    return self.helper.construct_ast_from_spec(spec01)

def display_tree( self, ast01 ):
    """Render an AST back into its textual spec form (delegates to the helper)."""
    return self.helper.display_tree(ast01)

def compare_specs( self, spec01, spec02 ):
    """Compare two textual tree specs for equivalence (delegates to the helper)."""
    return self.helper.compare_specs( spec01, spec02 )
# Every test below follows the same pattern: build an AST from spec01, clone
# the subtree rooted at the node named by node_spec01, render the clone, and
# check the result against spec_expected_01. Node labels encode the node kind:
# T = terminal, N/E = other leaf kinds, S = sequence, | = alternation,
# ? / + / * / {m,n} = repetition operators.
def test_0001(self):
    spec01 = 'T_001'
    node_spec01 = 'T_001'
    spec_expected_01 = 'T_001'
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0002(self):
    spec01 = 'N_001'
    node_spec01 = 'N_001'
    spec_expected_01 = 'N_001'
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0003(self):
    spec01 = 'E_001'
    node_spec01 = 'E_001'
    spec_expected_01 = 'E_001'
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0004(self):
    spec01 = 'S_001:T_001'
    node_spec01 = 'T_001'
    spec_expected_01 = 'T_001'
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0005(self):
    spec01 = 'S_001:T_001'
    node_spec01 = 'S_001'
    spec_expected_01 = 'S_001:T_001'
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0006(self):
    spec01 = 'S_001:T_001 T_002'
    node_spec01 = 'S_001'
    spec_expected_01 = 'S_001:T_001 T_002'
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0007(self):
    spec01 = '''
S_001:T_001 |_002
|_002:T_003 T_004
'''
    node_spec01 = 'S_001'
    spec_expected_01 = '''
S_001:T_001 |_002
|_002:T_003 T_004
'''
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0008(self):
    spec01 = '''
S_001:T_001 |_002
|_002:T_003 T_004
'''
    node_spec01 = '|_002'
    spec_expected_01 = '''
|_002:T_003 T_004
'''
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0009(self):
    spec01 = '''
S_001:T_001 |_002
|_002:T_003 T_004
'''
    node_spec01 = 'T_001'
    spec_expected_01 = '''
T_001
'''
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0010(self):
    spec01 = '''
S_001:T_001 |_002
|_002:T_003 T_004
'''
    node_spec01 = 'T_003'
    spec_expected_01 = '''
T_003
'''
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0011(self):
    spec01 = '''
S_001:T_001 |_002
|_002:T_003 T_004
'''
    node_spec01 = 'T_004'
    spec_expected_01 = '''
T_004
'''
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )

def test_0012(self):
    spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
    node_spec01 = 'S_001'
    spec_expected_01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
    ast01 = self.construct_ast_from_spec(spec01)
    node01 = self.helper.get_node(ast01, node_spec01)
    ast02 = ast01.clone_subtree(node01)
    spec02 = self.display_tree(ast02)
    self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0013(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_002'
spec_expected_01 = '''
T_002
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0014(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = '|_003'
spec_expected_01 = '''
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0015(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_004'
spec_expected_01 = '''
T_004
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0016(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_005'
spec_expected_01 = '''
S_005:?_008 T_009 +_010
?_008:S_016
+_010:S_017
S_016:T_020 T_021
S_017:E_022 T_023
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0017(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_006'
spec_expected_01 = '''
S_006:{2,3}_011 *_012 T_013
{2,3}_011:S_018
*_012:S_019
S_018:T_024 T_025
S_019:T_026 E_027
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0018(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_007'
spec_expected_01 = '''
S_007:T_014 T_015
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0019(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = '?_008'
spec_expected_01 = '''
?_008:S_016
S_016:T_020 T_021
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0020(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = '+_010'
spec_expected_01 = '''
+_010:S_017
S_017:E_022 T_023
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0021(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = '{2,3}_011'
spec_expected_01 = '''
{2,3}_011:S_018
S_018:T_024 T_025
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0022(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = '*_012'
spec_expected_01 = '''
*_012:S_019
S_019:T_026 E_027
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0023(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_013'
spec_expected_01 = '''
T_013
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0024(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_014'
spec_expected_01 = '''
T_014
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0025(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_015'
spec_expected_01 = '''
T_015
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0026(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_016'
spec_expected_01 = '''
S_016:T_020 T_021
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0027(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_017'
spec_expected_01 = '''
S_017:E_022 T_023
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0028(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_018'
spec_expected_01 = '''
S_018:T_024 T_025
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0029(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'S_019'
spec_expected_01 = '''
S_019:T_026 E_027
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0030(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_020'
spec_expected_01 = '''
T_020
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0031(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_021'
spec_expected_01 = '''
T_021
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0032(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'E_022'
spec_expected_01 = '''
E_022
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0033(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_023'
spec_expected_01 = '''
T_023
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0034(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_024'
spec_expected_01 = '''
T_024
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0035(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_025'
spec_expected_01 = '''
T_025
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0036(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'T_026'
spec_expected_01 = '''
T_026
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
def test_0037(self):
spec01 = '''
S_001:T_002 |_003 T_004
|_003:S_005 S_006 S_007
S_005:?_008 T_009 +_010
S_006:{2,3}_011 *_012 T_013
S_007:T_014 T_015
?_008:S_016
+_010:S_017
{2,3}_011:S_018
*_012:S_019
S_016:T_020 T_021
S_017:E_022 T_023
S_018:T_024 T_025
S_019:T_026 E_027
'''
node_spec01 = 'E_027'
spec_expected_01 = '''
E_027
'''
ast01 = self.construct_ast_from_spec(spec01)
node01 = self.helper.get_node(ast01, node_spec01)
ast02 = ast01.clone_subtree(node01)
spec02 = self.display_tree(ast02)
self.assertEqual( self.compare_specs(spec_expected_01, spec02), True )
# Allow running this test module directly (python <module>.py) in
# addition to discovery via a test runner.
if __name__ == '__main__':
    unittest.main()
| 28.457116
| 82
| 0.516345
| 4,101
| 30,193
| 3.357718
| 0.029017
| 0.05374
| 0.075236
| 0.056645
| 0.910603
| 0.908061
| 0.903849
| 0.903849
| 0.900363
| 0.899201
| 0
| 0.285203
| 0.398337
| 30,193
| 1,060
| 83
| 28.483962
| 0.472806
| 0.002716
| 0
| 0.876364
| 0
| 0
| 0.458252
| 0
| 0
| 0
| 0
| 0
| 0.044848
| 1
| 0.049697
| false
| 0
| 0.003636
| 0.003636
| 0.058182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
31e83c7c52fbd99269a62f4d954d6794af74d0fe
| 54,790
|
py
|
Python
|
operations/accounts/management/commands/set_user_permissions.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/accounts/management/commands/set_user_permissions.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/accounts/management/commands/set_user_permissions.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
help = 'Set Group Permissions'
def handle(self, *args, **kwargs):
#do stuff
Permission.objects.all().delete()
fleet_models = ['FileUpload', 'FuelAllocation', 'FuelCard', 'Incident', 'InsuranceClaim', 'MileageLog', 'RenewLicenceDisk', 'ServiceBooking',
'Trafficfine', 'Trip', 'TripLog', 'Vehicle', 'VehicleAllocation', 'VehicleExtras', 'VehicleMaintenance', 'VehicleMakeAndModel', 'Comment','Requisition',
'RequisitionItem', 'FuelTransfer']
offices_models = ['Branch', 'Document', 'ElectricityMeterNumber', 'ElectricityPurchase', 'Floor', 'MobilePurchase', 'Region', 'Section',
'TelcomPABXContract', 'TelcomPABXContractRenewal']
propfac_models = ['PFDocument', 'LeaseAgreement', 'LeaseAgreementRenewal', 'PFComment', 'PFRequisition',
'PFRequisitionItem', 'PropertyMaintenance','OfficeInspection', 'ToiletInspection',]
stock_models = ['STComment', 'STDocument', 'StockItem', 'StockReplenishment', 'StockTake', 'STRequisition',
'STRequisitionItem', 'BranchStock',]
model_list = {'fleet':fleet_models, 'offices':offices_models, 'propfac':propfac_models, 'stock':stock_models }
p_types = ['view', 'create', 'edit', 'authorize']
for app, models in model_list.items():
for m in models:
print app,m
content_type = ContentType.objects.get_or_create(app_label=app, model=m)[0]
for p_type in p_types:
Permission.objects.get_or_create(codename='%s_%s' % (p_type, m.lower()),
name='%s %s' % (p_type.upper(), m),
content_type=content_type)
content_type = ContentType.objects.get_or_create(app_label="fleet", model=m)[0]
Permission.objects.get_or_create(codename='view_reports', name='View Reports', content_type=content_type)
Permission.objects.get_or_create(codename='send_sms', name='Send SMS', content_type=content_type)
ops_executive, exists = Group.objects.get_or_create(name='Operations Executive')
ops_manager, exists = Group.objects.get_or_create(name='Operations Manager')
ops_fleet, exists = Group.objects.get_or_create(name='Operations Fleet')
ops_assistant, exists = Group.objects.get_or_create(name='Operations Assistant')
ops_assets, exists = Group.objects.get_or_create(name='Operations Assets')
ops_stock, exists = Group.objects.get_or_create(name='Operations Stock')
ops_offices, exists = Group.objects.get_or_create(name='Operations Offices')
ops_property, exists = Group.objects.get_or_create(name='Operations Property And Facilities')
r_perms = Permission.objects.filter(codename='view_reports').values_list('id', flat=True)
for perm in r_perms:
ops_executive.permissions.add(perm)
ops_manager.permissions.add(perm)
s_perms = Permission.objects.filter(codename='send_sms').values_list('id', flat=True)
for perm in s_perms:
ops_executive.permissions.add(perm)
ops_manager.permissions.add(perm)
ops_fleet.permissions.add(perm)
ops_assistant.permissions.add(perm)
ops_assets.permissions.add(perm)
ops_stock.permissions.add(perm)
ops_offices.permissions.add(perm)
ops_property.permissions.add(perm)
#Operations Executive (Fleet)
ops_executive.permissions.add(Permission.objects.get(codename='view_fileupload').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_fileupload').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_fileupload').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_fileupload').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_fuelallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_fuelallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_fuelallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_fuelallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_fueltransfer').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_fueltransfer').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_fueltransfer').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_fueltransfer').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_fuelcard').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_fuelcard').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_fuelcard').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_fuelcard').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_incident').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_incident').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_incident').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_incident').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_insuranceclaim').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_insuranceclaim').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_insuranceclaim').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_insuranceclaim').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_mileagelog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_mileagelog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_mileagelog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_mileagelog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_renewlicencedisk').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_renewlicencedisk').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_renewlicencedisk').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_renewlicencedisk').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_servicebooking').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_servicebooking').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_servicebooking').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_servicebooking').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_trafficfine').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_trafficfine').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_trafficfine').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_trafficfine').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_trip').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_trip').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_trip').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_trip').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_triplog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_triplog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_triplog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_triplog').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_vehicle').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_vehicle').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_vehicle').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_vehicle').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_vehicleallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_vehicleallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_vehicleallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_vehicleallocation').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_vehiclemaintenance').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_vehiclemaintenance').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_vehiclemaintenance').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_vehiclemaintenance').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_comment').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_comment').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_comment').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_comment').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_requisition').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_requisition').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_requisition').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_requisition').pk)
ops_executive.permissions.add(Permission.objects.get(codename='view_requisitionitem').pk)
ops_executive.permissions.add(Permission.objects.get(codename='create_requisitionitem').pk)
ops_executive.permissions.add(Permission.objects.get(codename='edit_requisitionitem').pk)
ops_executive.permissions.add(Permission.objects.get(codename='authorize_requisitionitem').pk)
#Operations Executive (Offices)
# Grant view/create/edit/authorize on every Offices-app model to the
# Operations Executive role. Data-driven loop replaces 40 hand-written
# lines; get() semantics (DoesNotExist on missing codename) and the
# original model/action ordering are preserved.
for _model in ('telcompabxcontractrenewal', 'telcompabxcontract', 'section',
               'region', 'mobilepurchase', 'floor', 'electricitypurchase',
               'electricitymeternumber', 'document', 'branch'):
    for _action in ('view', 'create', 'edit', 'authorize'):
        ops_executive.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Executive (propfac)
# Full view/create/edit/authorize set on every property/facilities model.
# Same semantics as the unrolled original: one get() + one add() per
# permission, in the same order.
for _model in ('toiletinspection', 'officeinspection', 'propertymaintenance',
               'pfrequisitionitem', 'pfrequisition', 'pfcomment',
               'leaseagreementrenewal', 'leaseagreement'):
    for _action in ('view', 'create', 'edit', 'authorize'):
        ops_executive.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Executive (stock)
# Full view/create/edit/authorize set on every stock-app model.
# One get() + one add() per permission, same order as the unrolled original.
for _model in ('stcomment', 'stdocument', 'stockitem', 'stockreplenishment',
               'stocktake', 'strequisition', 'strequisitionitem',
               'branchstock'):
    for _action in ('view', 'create', 'edit', 'authorize'):
        ops_executive.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
#Operations Manager (fleet)
# Operations Manager gets view/create/edit (no authorize) on every fleet
# model. Loop preserves the original order and per-permission get()/add()
# semantics.
for _model in ('fileupload', 'fuelallocation', 'fueltransfer', 'fuelcard',
               'incident', 'insuranceclaim', 'mileagelog', 'renewlicencedisk',
               'servicebooking', 'trafficfine', 'trip', 'triplog', 'vehicle',
               'vehicleallocation', 'vehiclemaintenance', 'comment',
               'requisition', 'requisitionitem'):
    for _action in ('view', 'create', 'edit'):
        ops_manager.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Manager (Offices)
# view/create/edit (no authorize) on every Offices-app model, in the
# original listing order.
for _model in ('telcompabxcontractrenewal', 'telcompabxcontract', 'section',
               'region', 'mobilepurchase', 'floor', 'electricitypurchase',
               'electricitymeternumber', 'document', 'branch'):
    for _action in ('view', 'create', 'edit'):
        ops_manager.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Manager (propfac)
# view/create/edit (no authorize) on every property/facilities model.
for _model in ('toiletinspection', 'officeinspection', 'propertymaintenance',
               'pfrequisitionitem', 'pfrequisition', 'pfcomment',
               'leaseagreementrenewal', 'leaseagreement'):
    for _action in ('view', 'create', 'edit'):
        ops_manager.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Manager (stock)
# view/create/edit (no authorize) on every stock-app model.
for _model in ('stcomment', 'stdocument', 'stockitem', 'stockreplenishment',
               'stocktake', 'strequisition', 'strequisitionitem',
               'branchstock'):
    for _action in ('view', 'create', 'edit'):
        ops_manager.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
#Operations Fleet (Fleet)
# Operations Fleet role gets view/create only (no edit/authorize) on every
# fleet model. Same order and get()/add() semantics as the unrolled lines.
for _model in ('fileupload', 'fuelallocation', 'fueltransfer', 'fuelcard',
               'incident', 'insuranceclaim', 'mileagelog', 'renewlicencedisk',
               'servicebooking', 'trafficfine', 'trip', 'triplog', 'vehicle',
               'vehicleallocation', 'vehiclemaintenance', 'comment',
               'requisition', 'requisitionitem'):
    for _action in ('view', 'create'):
        ops_fleet.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Assistant: view/create permissions granted in bulk per app area.
# The loop keeps Permission.objects.get's fail-loud behaviour: a missing or
# duplicated codename still raises instead of being silently skipped.
_OPS_ASSISTANT_MODELS = [
    # Fleet
    'fileupload', 'fuelallocation', 'fueltransfer', 'fuelcard', 'incident',
    'insuranceclaim', 'mileagelog', 'renewlicencedisk', 'servicebooking',
    'trafficfine', 'trip', 'triplog', 'vehicle', 'vehicleallocation',
    'vehiclemaintenance', 'comment', 'requisition', 'requisitionitem',
    # Offices
    'telcompabxcontractrenewal', 'telcompabxcontract', 'section', 'region',
    'mobilepurchase', 'floor', 'electricitypurchase',
    'electricitymeternumber', 'document', 'branch',
    # Property and facilities (propfac)
    'toiletinspection', 'officeinspection', 'propertymaintenance',
    'pfrequisitionitem', 'pfrequisition', 'pfcomment',
    'leaseagreementrenewal', 'leaseagreement',
]
for _model in _OPS_ASSISTANT_MODELS:
    for _action in ('view', 'create'):
        ops_assistant.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Offices: view/create permissions granted in bulk per app area.
# The loop keeps Permission.objects.get's fail-loud behaviour: a missing or
# duplicated codename still raises instead of being silently skipped.
_OPS_OFFICES_MODELS = [
    # Offices
    'telcompabxcontractrenewal', 'telcompabxcontract', 'section', 'region',
    'mobilepurchase', 'floor', 'electricitypurchase',
    'electricitymeternumber', 'document', 'branch',
    # Property and facilities (propfac)
    'toiletinspection', 'officeinspection', 'propertymaintenance',
    'pfrequisitionitem', 'pfrequisition', 'pfcomment',
    'leaseagreementrenewal', 'leaseagreement',
]
for _model in _OPS_OFFICES_MODELS:
    for _action in ('view', 'create'):
        ops_offices.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations Property And Facilities: view/create permissions granted in bulk
# per app area. The loop keeps Permission.objects.get's fail-loud behaviour:
# a missing or duplicated codename still raises instead of being skipped.
_OPS_PROPERTY_MODELS = [
    # Offices
    'telcompabxcontractrenewal', 'telcompabxcontract', 'section', 'region',
    'mobilepurchase', 'floor', 'electricitypurchase',
    'electricitymeternumber', 'document', 'branch',
    # Property and facilities (propfac)
    'toiletinspection', 'officeinspection', 'propertymaintenance',
    'pfrequisitionitem', 'pfrequisition', 'pfcomment',
    'leaseagreementrenewal', 'leaseagreement',
]
for _model in _OPS_PROPERTY_MODELS:
    for _action in ('view', 'create'):
        ops_property.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# This group alone also gets the custom 'edit' permission on office
# inspections (M2M add order relative to the loop is irrelevant).
ops_property.permissions.add(
    Permission.objects.get(codename='edit_officeinspection').pk)
# Operations assets: view/create permissions on the stock-taking models.
# The loop keeps Permission.objects.get's fail-loud behaviour: a missing or
# duplicated codename still raises instead of being silently skipped.
for _model in ['stcomment', 'stdocument', 'stockitem', 'stockreplenishment',
               'stocktake', 'strequisition', 'strequisitionitem',
               'branchstock']:
    for _action in ('view', 'create'):
        ops_assets.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
# Operations stock: view/create permissions on the stock-taking models
# (same model set as the assets group above).
# The loop keeps Permission.objects.get's fail-loud behaviour: a missing or
# duplicated codename still raises instead of being silently skipped.
for _model in ['stcomment', 'stdocument', 'stockitem', 'stockreplenishment',
               'stocktake', 'strequisition', 'strequisitionitem',
               'branchstock']:
    for _action in ('view', 'create'):
        ops_stock.permissions.add(
            Permission.objects.get(codename=_action + '_' + _model).pk)
| 65.932611
| 175
| 0.763004
| 6,178
| 54,790
| 6.585141
| 0.020881
| 0.131259
| 0.257601
| 0.396996
| 0.952462
| 0.94966
| 0.947005
| 0.947005
| 0.937124
| 0.928103
| 0
| 0.000041
| 0.113816
| 54,790
| 831
| 176
| 65.932611
| 0.837851
| 0.009746
| 0
| 0.006993
| 0
| 0
| 0.197924
| 0.095321
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.005245
| null | null | 0.001748
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b4083d9df64925e12634d56c6043cbf442af60d6
| 13,619
|
py
|
Python
|
src/model_zoo/models_resnet8.py
|
chisyliu/A_General_Framework_for_Uncertainty_Estimation_in_Deep_Learning
|
da49ea5a040289d6e76e86bb06325fbfaf13803b
|
[
"MIT"
] | 1
|
2019-08-23T15:02:03.000Z
|
2019-08-23T15:02:03.000Z
|
src/model_zoo/models_resnet8.py
|
chisyliu/A_General_Framework_for_Uncertainty_Estimation_in_Deep_Learning
|
da49ea5a040289d6e76e86bb06325fbfaf13803b
|
[
"MIT"
] | null | null | null |
src/model_zoo/models_resnet8.py
|
chisyliu/A_General_Framework_for_Uncertainty_Estimation_in_Deep_Learning
|
da49ea5a040289d6e76e86bb06325fbfaf13803b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
import torch.nn as nn
import numpy as np
from opts import parser
FLAGS = parser.parse_args()
def init_kernel(m):
    """Kaiming-normal (He) initialisation for Conv2d layers.

    Intended for use with ``nn.Module.apply``; modules other than
    ``nn.Conv2d`` are left untouched.
    """
    if isinstance(m, nn.Conv2d):
        # Initialize kernels of Conv2d layers as kaiming normal
        nn.init.kaiming_normal_(m.weight, mode='fan_in', nonlinearity='relu')
        # Initialize biases of Conv2d layers at 0. Guard against
        # bias=False convolutions, where m.bias is None and zeros_
        # would raise a TypeError.
        if m.bias is not None:
            nn.init.zeros_(m.bias)
class resnet8(nn.Module):
    """
    ResNet8 architecture: a 5x5 stem, three residual stages with 1x1
    strided shortcuts, and a dropout + linear head.

    # Arguments
       img_channels: Number of channels in the input image.
       output_dim: Dimension of the model output.
    """
    def __init__(self, img_channels, output_dim):
        super(resnet8, self).__init__()

        def make_residual(cin, cout):
            # BN -> ReLU -> 3x3 conv (stride 2) -> BN -> ReLU -> 3x3 conv
            return nn.Sequential(
                nn.BatchNorm2d(cin),
                nn.ReLU(),
                nn.Conv2d(in_channels=cin, out_channels=cout,
                          kernel_size=[3, 3], stride=[2, 2],
                          padding=[3 // 2, 3 // 2]),
                nn.BatchNorm2d(cout),
                nn.ReLU(),
                nn.Conv2d(in_channels=cout, out_channels=cout,
                          kernel_size=[3, 3], padding=[3 // 2, 3 // 2]))

        def make_shortcut(cin, cout):
            # 1x1 strided conv matching the residual branch's downsampling.
            return nn.Conv2d(in_channels=cin, out_channels=cout,
                             kernel_size=[1, 1], stride=[2, 2],
                             padding=[1 // 2, 1 // 2])

        self.layer1 = nn.Sequential(
            nn.Conv2d(in_channels=img_channels, out_channels=32,
                      kernel_size=[5, 5], stride=[2, 2],
                      padding=[5 // 2, 5 // 2]),
            nn.MaxPool2d(kernel_size=2))
        self.residual_block_1a = make_residual(32, 32)
        self.parallel_conv_1 = make_shortcut(32, 32)
        self.residual_block_2a = make_residual(32, 64)
        self.parallel_conv_2 = make_shortcut(32, 64)
        self.residual_block_3a = make_residual(64, 128)
        self.parallel_conv_3 = make_shortcut(64, 128)
        self.output_dim = output_dim
        # NOTE(review): Linear(6272, ...) fixes the flattened feature size,
        # so the input spatial size is implicitly fixed as well.
        self.last_block = nn.Sequential(
            nn.ReLU(),
            nn.Dropout2d(),
            nn.Linear(6272, self.output_dim))
        # Initialize layers exactly as in Keras: Xavier for conv/linear
        # weights, zero biases, unit-gamma/zero-beta batch norm...
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Linear)):
                nn.init.xavier_uniform_(m.weight,
                                        gain=nn.init.calculate_gain('relu'))
                nn.init.zeros_(m.bias)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # ...then re-initialize the residual-branch convs with Kaiming normal.
        for block in (self.residual_block_1a,
                      self.residual_block_2a,
                      self.residual_block_3a):
            block.apply(init_kernel)

    def forward(self, x):
        trunk = self.layer1(x)
        # Each stage sums the residual branch with its 1x1 shortcut
        # (residual branch evaluated first, as in the original code).
        branch = self.residual_block_1a(trunk)
        trunk = self.parallel_conv_1(trunk).add(branch)
        branch = self.residual_block_2a(trunk)
        trunk = self.parallel_conv_2(trunk).add(branch)
        branch = self.residual_block_3a(trunk)
        trunk = self.parallel_conv_3(trunk).add(branch)
        flat = trunk.view(trunk.size(0), -1)  # Flatten
        return self.last_block(flat)
class resnet8_MCDO(nn.Module):
    """
    ResNet8 with Monte-Carlo dropout (MCDO).

    # Arguments
       img_channels: Number of channels in the input image.
       in_height: Input image height (accepted for interface
           compatibility; not used by the construction).
       in_width: Input image width (accepted for interface
           compatibility; not used by the construction).
       output_dim: Dimension of the model output.

    Dropout2d (rate ``FLAGS.dropout``) follows every convolutional layer,
    not only the inner-product ones, so dropout can stay enabled at test
    time for MC sampling. As mentioned by Gal, Dropout is placed after
    conv layers and before MaxPool.
    """
    def __init__(self, img_channels, in_height, in_width, output_dim):
        super(resnet8_MCDO, self).__init__()
        p = FLAGS.dropout

        def make_residual(cin, cout):
            # BN -> ReLU -> 3x3 conv (stride 2) -> dropout -> BN -> ReLU
            # -> 3x3 conv -> dropout.
            return nn.Sequential(
                nn.BatchNorm2d(cin),
                nn.ReLU(),
                nn.Conv2d(in_channels=cin, out_channels=cout,
                          kernel_size=[3, 3], stride=[2, 2],
                          padding=[3 // 2, 3 // 2]),
                nn.Dropout2d(p=p),
                nn.BatchNorm2d(cout),
                nn.ReLU(),
                nn.Conv2d(in_channels=cout, out_channels=cout,
                          kernel_size=[3, 3], padding=[3 // 2, 3 // 2]),
                nn.Dropout2d(p=p))

        def make_shortcut(cin, cout):
            # 1x1 strided conv + dropout matching the residual downsampling.
            return nn.Sequential(
                nn.Conv2d(in_channels=cin, out_channels=cout,
                          kernel_size=[1, 1], stride=[2, 2],
                          padding=[1 // 2, 1 // 2]),
                nn.Dropout2d(p=p))

        self.layer1 = nn.Sequential(
            nn.Conv2d(in_channels=img_channels, out_channels=32,
                      kernel_size=[5, 5], stride=[2, 2],
                      padding=[5 // 2, 5 // 2]),
            nn.Dropout2d(p=p),
            nn.MaxPool2d(kernel_size=2))
        self.residual_block_1a = make_residual(32, 32)
        self.parallel_conv_1 = make_shortcut(32, 32)
        self.residual_block_2a = make_residual(32, 64)
        self.parallel_conv_2 = make_shortcut(32, 64)
        self.residual_block_3a = make_residual(64, 128)
        self.parallel_conv_3 = make_shortcut(64, 128)
        self.output_dim = output_dim
        self.last_block = nn.Sequential(
            nn.ReLU(),
            nn.Linear(6272, self.output_dim))
        # Initialize layers exactly as in Keras: Xavier for conv/linear
        # weights, zero biases, unit-gamma/zero-beta batch norm, then
        # Kaiming normal on the residual-branch convolutions.
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Linear)):
                nn.init.xavier_uniform_(m.weight,
                                        gain=nn.init.calculate_gain('relu'))
                nn.init.zeros_(m.bias)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        for block in (self.residual_block_1a,
                      self.residual_block_2a,
                      self.residual_block_3a):
            block.apply(init_kernel)

    def forward(self, x):
        trunk = self.layer1(x)
        # Residual branch is evaluated before the shortcut in every stage;
        # this preserves the original dropout RNG consumption order.
        branch = self.residual_block_1a(trunk)
        trunk = self.parallel_conv_1(trunk).add(branch)
        branch = self.residual_block_2a(trunk)
        trunk = self.parallel_conv_2(trunk).add(branch)
        branch = self.residual_block_3a(trunk)
        trunk = self.parallel_conv_3(trunk).add(branch)
        flat = trunk.view(trunk.size(0), -1)  # Flatten
        return self.last_block(flat)
class resnet8_MCDO_ale(nn.Module):
    """
    ResNet8 with Monte-Carlo dropout and an aleatoric-uncertainty head.

    # Arguments
       img_channels: Number of channels in the input image.
       in_height: Input image height (accepted for interface
           compatibility; not used by the construction).
       in_width: Input image width (accepted for interface
           compatibility; not used by the construction).
       output_dim: Dimension of each head's output.

    Dropout2d (rate ``FLAGS.dropout``) follows every convolutional layer,
    not only the inner-product ones, so dropout can stay enabled at test
    time for MC sampling. As mentioned by Gal, Dropout is placed after
    conv layers and before MaxPool. Two linear heads predict the mean and
    the log-variance of the output.
    """
    def __init__(self, img_channels, in_height, in_width, output_dim):
        super(resnet8_MCDO_ale, self).__init__()
        p = FLAGS.dropout

        def make_residual(cin, cout):
            # BN -> ReLU -> 3x3 conv (stride 2) -> dropout -> BN -> ReLU
            # -> 3x3 conv -> dropout.
            return nn.Sequential(
                nn.BatchNorm2d(cin),
                nn.ReLU(),
                nn.Conv2d(in_channels=cin, out_channels=cout,
                          kernel_size=[3, 3], stride=[2, 2],
                          padding=[3 // 2, 3 // 2]),
                nn.Dropout2d(p=p),
                nn.BatchNorm2d(cout),
                nn.ReLU(),
                nn.Conv2d(in_channels=cout, out_channels=cout,
                          kernel_size=[3, 3], padding=[3 // 2, 3 // 2]),
                nn.Dropout2d(p=p))

        def make_shortcut(cin, cout):
            # 1x1 strided conv + dropout matching the residual downsampling.
            return nn.Sequential(
                nn.Conv2d(in_channels=cin, out_channels=cout,
                          kernel_size=[1, 1], stride=[2, 2],
                          padding=[1 // 2, 1 // 2]),
                nn.Dropout2d(p=p))

        self.layer1 = nn.Sequential(
            nn.Conv2d(in_channels=img_channels, out_channels=32,
                      kernel_size=[5, 5], stride=[2, 2],
                      padding=[5 // 2, 5 // 2]),
            nn.Dropout2d(p=p),
            nn.MaxPool2d(kernel_size=2))
        self.residual_block_1a = make_residual(32, 32)
        self.parallel_conv_1 = make_shortcut(32, 32)
        self.residual_block_2a = make_residual(32, 64)
        self.parallel_conv_2 = make_shortcut(32, 64)
        self.residual_block_3a = make_residual(64, 128)
        self.parallel_conv_3 = make_shortcut(64, 128)
        self.output_dim = output_dim
        self.last_block_mean = nn.Sequential(
            nn.ReLU(),
            nn.Linear(6272, self.output_dim))
        self.last_block_var = nn.Sequential(
            nn.ReLU(),
            nn.Linear(6272, self.output_dim))
        # Initialize layers exactly as in Keras: Xavier for conv/linear
        # weights, zero biases, unit-gamma/zero-beta batch norm, then
        # Kaiming normal on the residual-branch convolutions.
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Linear)):
                nn.init.xavier_uniform_(m.weight,
                                        gain=nn.init.calculate_gain('relu'))
                nn.init.zeros_(m.bias)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        for block in (self.residual_block_1a,
                      self.residual_block_2a,
                      self.residual_block_3a):
            block.apply(init_kernel)

    def forward(self, x):
        trunk = self.layer1(x)
        # Residual branch is evaluated before the shortcut in every stage;
        # this preserves the original dropout RNG consumption order.
        branch = self.residual_block_1a(trunk)
        trunk = self.parallel_conv_1(trunk).add(branch)
        branch = self.residual_block_2a(trunk)
        trunk = self.parallel_conv_2(trunk).add(branch)
        branch = self.residual_block_3a(trunk)
        trunk = self.parallel_conv_3(trunk).add(branch)
        flat = trunk.view(trunk.size(0), -1)  # Flatten
        # We model the network to learn also the log variance: two heads
        # share the trunk features.
        return {'mean': self.last_block_mean(flat),
                'log_var': self.last_block_var(flat)}
| 36.029101
| 86
| 0.534474
| 1,754
| 13,619
| 3.978905
| 0.087799
| 0.067058
| 0.042986
| 0.077375
| 0.950996
| 0.940249
| 0.940249
| 0.940249
| 0.940249
| 0.940249
| 0
| 0.070726
| 0.34283
| 13,619
| 377
| 87
| 36.124668
| 0.70905
| 0.12791
| 0
| 0.904
| 0
| 0
| 0.002823
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028
| false
| 0
| 0.012
| 0
| 0.064
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b40cca76962f221c3fd6d988f10f039053d8f3f6
| 148,484
|
py
|
Python
|
src/kubernetes/client/apis/batch_v2alpha1_api.py
|
MarletteFunding/aws-kube-codesuite
|
ab4e5ce45416b83bffb947ab8d234df5437f4fca
|
[
"Apache-2.0"
] | 184
|
2017-12-20T21:50:06.000Z
|
2022-03-19T13:24:58.000Z
|
src/kubernetes/client/apis/batch_v2alpha1_api.py
|
MarletteFunding/aws-kube-codesuite
|
ab4e5ce45416b83bffb947ab8d234df5437f4fca
|
[
"Apache-2.0"
] | 15
|
2018-01-17T17:30:51.000Z
|
2021-12-16T14:25:09.000Z
|
src/kubernetes/client/apis/batch_v2alpha1_api.py
|
MarletteFunding/aws-kube-codesuite
|
ab4e5ce45416b83bffb947ab8d234df5437f4fca
|
[
"Apache-2.0"
] | 136
|
2018-01-09T22:52:06.000Z
|
2022-02-24T13:26:18.000Z
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class BatchV2alpha1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API to a client, defaulting to the shared configuration's.

    Configuration() is constructed unconditionally, matching the original
    generated code's side effects.
    """
    config = Configuration()
    if not api_client:
        # Lazily create and cache a client on the shared configuration.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def create_namespaced_cron_job(self, namespace, body, **kwargs):
"""
create a CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_cron_job(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_namespaced_cron_job_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_cron_job_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_cron_job_with_http_info(self, namespace, body, **kwargs):
    """
    create a CronJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.create_namespaced_cron_job_with_http_info(namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2alpha1CronJob body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the api_client control keywords shared by
    # every generated method.
    all_params = ['namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_cron_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_cron_job`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_cron_job`")
    collection_formats = {}
    # Build path/query/header/form containers for api_client.call_api.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The CronJob manifest is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP POST (and optional async dispatch) to the
    # shared api_client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_namespaced_scheduled_job(self, namespace, body, **kwargs):
    """
    create a ScheduledJob

    Runs synchronously by default. Supplying a `callback` callable makes
    the request asynchronous and returns the request thread instead:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_namespaced_scheduled_job(namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2alpha1CronJob body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always get the payload only,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With a callback the worker hands back the request thread; without
    # one it returns the deserialized object directly — in both cases
    # its return value is exactly what this wrapper should return.
    return self.create_namespaced_scheduled_job_with_http_info(namespace, body, **kwargs)
def create_namespaced_scheduled_job_with_http_info(self, namespace, body, **kwargs):
    """
    create a ScheduledJob

    Synchronous HTTP request by default; pass a `callback` callable to
    execute asynchronously and receive the request thread:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_namespaced_scheduled_job_with_http_info(namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2alpha1CronJob body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords a caller may legitimately supply.
    accepted = set(['namespace', 'body', 'pretty', 'callback',
                    '_return_http_data_only', '_preload_content',
                    '_request_timeout'])
    # Fold positional and keyword arguments into one parameter dict,
    # rejecting anything unexpected up front.
    params = {'namespace': namespace, 'body': body}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_scheduled_job" % key
            )
        params[key] = val
    # Required parameters must be present and non-None.
    for required in ('namespace', 'body'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `create_namespaced_scheduled_job`" % required)
    collection_formats = {}
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs'.replace('{format}', 'json')
    # URL template substitutions.
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string arguments.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    # 'body' is always present here (validated above).
    body_params = params.get('body')
    # Only bearer-token authentication is defined for this endpoint.
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V2alpha1CronJob',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_collection_namespaced_cron_job(self, namespace, **kwargs):
    """
    delete collection of CronJob

    Runs synchronously by default. Supplying a `callback` callable makes
    the request asynchronous and returns the request thread instead:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_collection_namespaced_cron_job(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # This wrapper always unwraps the HTTP triple down to the payload.
    kwargs['_return_http_data_only'] = True
    # The worker returns the request thread when a callback is supplied,
    # otherwise the deserialized object — either way, pass it through.
    return self.delete_collection_namespaced_cron_job_with_http_info(namespace, **kwargs)
def delete_collection_namespaced_cron_job_with_http_info(self, namespace, **kwargs):
    """
    delete collection of CronJob

    Synchronous HTTP request by default; pass a `callback` callable to
    execute asynchronously and receive the request thread:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_collection_namespaced_cron_job_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords a caller may legitimately supply.
    accepted = set(['namespace', 'pretty', 'field_selector',
                    'include_uninitialized', 'label_selector',
                    'resource_version', 'timeout_seconds', 'watch',
                    'callback', '_return_http_data_only',
                    '_preload_content', '_request_timeout'])
    # Fold positional and keyword arguments into one parameter dict,
    # rejecting anything unexpected up front.
    params = {'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_collection_namespaced_cron_job" % key
            )
        params[key] = val
    # The namespace is mandatory and must be non-None.
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_cron_job`")
    collection_formats = {}
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs'.replace('{format}', 'json')
    # URL template substitutions.
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string arguments, mapped snake_case -> wire name.
    query_params = {}
    for arg, wire in (('pretty', 'pretty'),
                      ('field_selector', 'fieldSelector'),
                      ('include_uninitialized', 'includeUninitialized'),
                      ('label_selector', 'labelSelector'),
                      ('resource_version', 'resourceVersion'),
                      ('timeout_seconds', 'timeoutSeconds'),
                      ('watch', 'watch')):
        if arg in params:
            query_params[wire] = params[arg]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    # Collection DELETE carries no request body.
    body_params = None
    # Only bearer-token authentication is defined for this endpoint.
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Status',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_collection_namespaced_scheduled_job(self, namespace, **kwargs):
    """
    delete collection of ScheduledJob

    Runs synchronously by default. Supplying a `callback` callable makes
    the request asynchronous and returns the request thread instead:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_collection_namespaced_scheduled_job(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # This wrapper always unwraps the HTTP triple down to the payload.
    kwargs['_return_http_data_only'] = True
    # The worker returns the request thread when a callback is supplied,
    # otherwise the deserialized object — either way, pass it through.
    return self.delete_collection_namespaced_scheduled_job_with_http_info(namespace, **kwargs)
def delete_collection_namespaced_scheduled_job_with_http_info(self, namespace, **kwargs):
    """
    delete collection of ScheduledJob

    Synchronous HTTP request by default; pass a `callback` callable to
    execute asynchronously and receive the request thread:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_collection_namespaced_scheduled_job_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords a caller may legitimately supply.
    accepted = set(['namespace', 'pretty', 'field_selector',
                    'include_uninitialized', 'label_selector',
                    'resource_version', 'timeout_seconds', 'watch',
                    'callback', '_return_http_data_only',
                    '_preload_content', '_request_timeout'])
    # Fold positional and keyword arguments into one parameter dict,
    # rejecting anything unexpected up front.
    params = {'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_collection_namespaced_scheduled_job" % key
            )
        params[key] = val
    # The namespace is mandatory and must be non-None.
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_scheduled_job`")
    collection_formats = {}
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs'.replace('{format}', 'json')
    # URL template substitutions.
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string arguments, mapped snake_case -> wire name.
    query_params = {}
    for arg, wire in (('pretty', 'pretty'),
                      ('field_selector', 'fieldSelector'),
                      ('include_uninitialized', 'includeUninitialized'),
                      ('label_selector', 'labelSelector'),
                      ('resource_version', 'resourceVersion'),
                      ('timeout_seconds', 'timeoutSeconds'),
                      ('watch', 'watch')):
        if arg in params:
            query_params[wire] = params[arg]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    # Collection DELETE carries no request body.
    body_params = None
    # Only bearer-token authentication is defined for this endpoint.
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Status',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_namespaced_cron_job(self, name, namespace, body, **kwargs):
    """
    delete a CronJob

    Runs synchronously by default. Supplying a `callback` callable makes
    the request asynchronous and returns the request thread instead:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_namespaced_cron_job(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1DeleteOptions body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # This wrapper always unwraps the HTTP triple down to the payload.
    kwargs['_return_http_data_only'] = True
    # The worker returns the request thread when a callback is supplied,
    # otherwise the deserialized object — either way, pass it through.
    return self.delete_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
def delete_namespaced_cron_job_with_http_info(self, name, namespace, body, **kwargs):
    """
    delete a CronJob

    Synchronous HTTP request by default; pass a `callback` callable to
    execute asynchronously and receive the request thread:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_namespaced_cron_job_with_http_info(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1DeleteOptions body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords a caller may legitimately supply.
    accepted = set(['name', 'namespace', 'body', 'pretty',
                    'grace_period_seconds', 'orphan_dependents',
                    'propagation_policy', 'callback',
                    '_return_http_data_only', '_preload_content',
                    '_request_timeout'])
    # Fold positional and keyword arguments into one parameter dict,
    # rejecting anything unexpected up front.
    params = {'name': name, 'namespace': namespace, 'body': body}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_namespaced_cron_job" % key
            )
        params[key] = val
    # Required parameters must be present and non-None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `delete_namespaced_cron_job`" % required)
    collection_formats = {}
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
    # URL template substitutions.
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string arguments, mapped snake_case -> wire name.
    query_params = {}
    for arg, wire in (('pretty', 'pretty'),
                      ('grace_period_seconds', 'gracePeriodSeconds'),
                      ('orphan_dependents', 'orphanDependents'),
                      ('propagation_policy', 'propagationPolicy')):
        if arg in params:
            query_params[wire] = params[arg]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    # The V1DeleteOptions document travels as the request body.
    body_params = params.get('body')
    # Only bearer-token authentication is defined for this endpoint.
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Status',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_namespaced_scheduled_job(self, name, namespace, body, **kwargs):
    """
    delete a ScheduledJob

    Runs synchronously by default. Supplying a `callback` callable makes
    the request asynchronous and returns the request thread instead:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_namespaced_scheduled_job(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1DeleteOptions body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # This wrapper always unwraps the HTTP triple down to the payload.
    kwargs['_return_http_data_only'] = True
    # The worker returns the request thread when a callback is supplied,
    # otherwise the deserialized object — either way, pass it through.
    return self.delete_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
def delete_namespaced_scheduled_job_with_http_info(self, name, namespace, body, **kwargs):
    """
    delete a ScheduledJob

    Synchronous HTTP request by default; pass a `callback` callable to
    execute asynchronously and receive the request thread:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_namespaced_scheduled_job_with_http_info(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1DeleteOptions body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
    :return: V1Status
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords a caller may legitimately supply.
    accepted = set(['name', 'namespace', 'body', 'pretty',
                    'grace_period_seconds', 'orphan_dependents',
                    'propagation_policy', 'callback',
                    '_return_http_data_only', '_preload_content',
                    '_request_timeout'])
    # Fold positional and keyword arguments into one parameter dict,
    # rejecting anything unexpected up front.
    params = {'name': name, 'namespace': namespace, 'body': body}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_namespaced_scheduled_job" % key
            )
        params[key] = val
    # Required parameters must be present and non-None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `delete_namespaced_scheduled_job`" % required)
    collection_formats = {}
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
    # URL template substitutions.
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string arguments, mapped snake_case -> wire name.
    query_params = {}
    for arg, wire in (('pretty', 'pretty'),
                      ('grace_period_seconds', 'gracePeriodSeconds'),
                      ('orphan_dependents', 'orphanDependents'),
                      ('propagation_policy', 'propagationPolicy')):
        if arg in params:
            query_params[wire] = params[arg]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    # The V1DeleteOptions document travels as the request body.
    body_params = params.get('body')
    # Only bearer-token authentication is defined for this endpoint.
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Status',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
    """
    get available resources

    Runs synchronously by default. Supplying a `callback` callable makes
    the request asynchronous and returns the request thread instead:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_api_resources(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: V1APIResourceList
        If the method is called asynchronously,
        returns the request thread.
    """
    # This wrapper always unwraps the HTTP triple down to the payload.
    kwargs['_return_http_data_only'] = True
    # The worker returns the request thread when a callback is supplied,
    # otherwise the deserialized object — either way, pass it through.
    return self.get_api_resources_with_http_info(**kwargs)
def get_api_resources_with_http_info(self, **kwargs):
    """
    get available resources

    Synchronous HTTP request by default; pass a `callback` callable to
    execute asynchronously and receive the request thread:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_api_resources_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: V1APIResourceList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the framework keywords are accepted — this endpoint takes no
    # API parameters of its own.
    accepted = set(['callback', '_return_http_data_only',
                    '_preload_content', '_request_timeout'])
    params = {}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_api_resources" % key
            )
        params[key] = val
    collection_formats = {}
    resource_path = '/apis/batch/v2alpha1/'.replace('{format}', 'json')
    # Discovery GET: no path, query, form, file or body payload.
    path_params = {}
    query_params = {}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    # Only bearer-token authentication is defined for this endpoint.
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1APIResourceList',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_cron_job_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str pretty: if 'true', the output is pretty printed
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    # Always request the deserialized data only; with a callback the
    # underlying call returns the request thread, otherwise the data itself.
    kwargs['_return_http_data_only'] = True
    return self.list_cron_job_for_all_namespaces_with_http_info(**kwargs)
def list_cron_job_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str pretty: if 'true', the output is pretty printed
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ('field_selector', 'include_uninitialized', 'label_selector',
                'pretty', 'resource_version', 'timeout_seconds', 'watch',
                'callback', '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key, value in params['kwargs'].items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_cron_job_for_all_namespaces" % key
            )
        params[key] = value
    del params['kwargs']

    # Translate python-style argument names into their wire-format query keys.
    query_params = {}
    for arg, wire in (('field_selector', 'fieldSelector'),
                      ('include_uninitialized', 'includeUninitialized'),
                      ('label_selector', 'labelSelector'),
                      ('pretty', 'pretty'),
                      ('resource_version', 'resourceVersion'),
                      ('timeout_seconds', 'timeoutSeconds'),
                      ('watch', 'watch')):
        if arg in params:
            query_params[wire] = params[arg]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf',
             'application/json;stream=watch',
             'application/vnd.kubernetes.protobuf;stream=watch']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    return self.api_client.call_api(
        '/apis/batch/v2alpha1/cronjobs'.replace('{format}', 'json'), 'GET',
        {},                          # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V2alpha1CronJobList',
        auth_settings=['BearerToken'],   # authentication setting
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_namespaced_cron_job(self, namespace, **kwargs):
    """
    list or watch objects of kind CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: if 'true', the output is pretty printed
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    # Always request the deserialized data only; with a callback the
    # underlying call returns the request thread, otherwise the data itself.
    kwargs['_return_http_data_only'] = True
    return self.list_namespaced_cron_job_with_http_info(namespace, **kwargs)
def list_namespaced_cron_job_with_http_info(self, namespace, **kwargs):
    """
    list or watch objects of kind CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: if 'true', the output is pretty printed
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ('namespace', 'pretty', 'field_selector',
                'include_uninitialized', 'label_selector', 'resource_version',
                'timeout_seconds', 'watch', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key, value in params['kwargs'].items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_namespaced_cron_job" % key
            )
        params[key] = value
    del params['kwargs']

    # The namespace is mandatory and may not be None.
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_cron_job`")

    # Translate python-style argument names into their wire-format query keys.
    query_params = {}
    for arg, wire in (('pretty', 'pretty'),
                      ('field_selector', 'fieldSelector'),
                      ('include_uninitialized', 'includeUninitialized'),
                      ('label_selector', 'labelSelector'),
                      ('resource_version', 'resourceVersion'),
                      ('timeout_seconds', 'timeoutSeconds'),
                      ('watch', 'watch')):
        if arg in params:
            query_params[wire] = params[arg]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf',
             'application/json;stream=watch',
             'application/vnd.kubernetes.protobuf;stream=watch']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    return self.api_client.call_api(
        '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs'.replace('{format}', 'json'),
        'GET',
        {'namespace': params['namespace']},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V2alpha1CronJobList',
        auth_settings=['BearerToken'],   # authentication setting
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_namespaced_scheduled_job(self, namespace, **kwargs):
    """
    list or watch objects of kind ScheduledJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: if 'true', the output is pretty printed
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    # Always request the deserialized data only; with a callback the
    # underlying call returns the request thread, otherwise the data itself.
    kwargs['_return_http_data_only'] = True
    return self.list_namespaced_scheduled_job_with_http_info(namespace, **kwargs)
def list_namespaced_scheduled_job_with_http_info(self, namespace, **kwargs):
    """
    list or watch objects of kind ScheduledJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: if 'true', the output is pretty printed
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ('namespace', 'pretty', 'field_selector',
                'include_uninitialized', 'label_selector', 'resource_version',
                'timeout_seconds', 'watch', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key, value in params['kwargs'].items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_namespaced_scheduled_job" % key
            )
        params[key] = value
    del params['kwargs']

    # The namespace is mandatory and may not be None.
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_scheduled_job`")

    # Translate python-style argument names into their wire-format query keys.
    query_params = {}
    for arg, wire in (('pretty', 'pretty'),
                      ('field_selector', 'fieldSelector'),
                      ('include_uninitialized', 'includeUninitialized'),
                      ('label_selector', 'labelSelector'),
                      ('resource_version', 'resourceVersion'),
                      ('timeout_seconds', 'timeoutSeconds'),
                      ('watch', 'watch')):
        if arg in params:
            query_params[wire] = params[arg]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf',
             'application/json;stream=watch',
             'application/vnd.kubernetes.protobuf;stream=watch']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    return self.api_client.call_api(
        '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs'.replace('{format}', 'json'),
        'GET',
        {'namespace': params['namespace']},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V2alpha1CronJobList',
        auth_settings=['BearerToken'],   # authentication setting
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_scheduled_job_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind ScheduledJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str pretty: if 'true', the output is pretty printed
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    # Always request the deserialized data only; with a callback the
    # underlying call returns the request thread, otherwise the data itself.
    kwargs['_return_http_data_only'] = True
    return self.list_scheduled_job_for_all_namespaces_with_http_info(**kwargs)
def list_scheduled_job_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind ScheduledJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str field_selector: restrict the returned objects by their fields
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by their labels
    :param str pretty: if 'true', the output is pretty printed
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a plain list
    :return: V2alpha1CronJobList
            If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ('field_selector', 'include_uninitialized', 'label_selector',
                'pretty', 'resource_version', 'timeout_seconds', 'watch',
                'callback', '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key, value in params['kwargs'].items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_scheduled_job_for_all_namespaces" % key
            )
        params[key] = value
    del params['kwargs']

    # Translate python-style argument names into their wire-format query keys.
    query_params = {}
    for arg, wire in (('field_selector', 'fieldSelector'),
                      ('include_uninitialized', 'includeUninitialized'),
                      ('label_selector', 'labelSelector'),
                      ('pretty', 'pretty'),
                      ('resource_version', 'resourceVersion'),
                      ('timeout_seconds', 'timeoutSeconds'),
                      ('watch', 'watch')):
        if arg in params:
            query_params[wire] = params[arg]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf',
             'application/json;stream=watch',
             'application/vnd.kubernetes.protobuf;stream=watch']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    return self.api_client.call_api(
        '/apis/batch/v2alpha1/scheduledjobs'.replace('{format}', 'json'),
        'GET',
        {},                          # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V2alpha1CronJobList',
        auth_settings=['BearerToken'],   # authentication setting
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_cron_job(self, name, namespace, body, **kwargs):
    """
    partially update the specified CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document to apply (required)
    :param str pretty: if 'true', the output is pretty printed
    :return: V2alpha1CronJob
            If the method is called asynchronously, returns the request thread.
    """
    # Always request the deserialized data only; with a callback the
    # underlying call returns the request thread, otherwise the data itself.
    kwargs['_return_http_data_only'] = True
    return self.patch_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
def patch_namespaced_cron_job_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document to apply (required)
    :param str pretty: if 'true', the output is pretty printed
    :return: V2alpha1CronJob
            If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key, value in params['kwargs'].items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_cron_job" % key
            )
        params[key] = value
    del params['kwargs']

    # All three positional parameters are mandatory and may not be None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_cron_job`")
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_cron_job`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_cron_job`")

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        # HTTP header `Content-Type`: JSON patch, merge patch or
        # strategic merge patch are the accepted patch encodings.
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json',
             'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json'),
        'PATCH',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V2alpha1CronJob',
        auth_settings=['BearerToken'],   # authentication setting
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_cron_job_status(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document to apply (required)
    :param str pretty: if 'true', the output is pretty printed
    :return: V2alpha1CronJob
            If the method is called asynchronously, returns the request thread.
    """
    # Always request the deserialized data only; with a callback the
    # underlying call returns the request thread, otherwise the data itself.
    kwargs['_return_http_data_only'] = True
    return self.patch_namespaced_cron_job_status_with_http_info(name, namespace, body, **kwargs)
def patch_namespaced_cron_job_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified CronJob
    Synchronous by default; pass a `callback` function to have the request
    performed asynchronously, in which case the request thread is returned.
    :param callback function: callback for an asynchronous request (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document to apply (required)
    :param str pretty: if 'true', the output is pretty printed
    :return: V2alpha1CronJob
            If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key, value in params['kwargs'].items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_cron_job_status" % key
            )
        params[key] = value
    del params['kwargs']

    # All three positional parameters are mandatory and may not be None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_cron_job_status`")
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_cron_job_status`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_cron_job_status`")

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        # HTTP header `Content-Type`: JSON patch, merge patch or
        # strategic merge patch are the accepted patch encodings.
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json',
             'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}/status'.replace('{format}', 'json'),
        'PATCH',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V2alpha1CronJob',
        auth_settings=['BearerToken'],   # authentication setting
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_scheduled_job(self, name, namespace, body, **kwargs):
    """
    partially update the specified ScheduledJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_namespaced_scheduled_job(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.patch_namespaced_scheduled_job_with_http_info(
            name, namespace, body, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.patch_namespaced_scheduled_job_with_http_info(
        name, namespace, body, **kwargs)
def patch_namespaced_scheduled_job_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified ScheduledJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.patch_namespaced_scheduled_job_with_http_info(name, namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals (self, name, namespace, body, kwargs,
    # all_params) into one dict; the rest of the method reads from it.
    # NOTE: order matters — any local bound before this call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_scheduled_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_scheduled_job`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_scheduled_job`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_scheduled_job`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type` (the three Kubernetes patch media types)
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP PATCH to the shared API client.
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def patch_namespaced_scheduled_job_status(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified ScheduledJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_namespaced_scheduled_job_status(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.patch_namespaced_scheduled_job_status_with_http_info(
            name, namespace, body, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.patch_namespaced_scheduled_job_status_with_http_info(
        name, namespace, body, **kwargs)
def patch_namespaced_scheduled_job_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified ScheduledJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.patch_namespaced_scheduled_job_status_with_http_info(name, namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals into one dict; the rest of the method
    # reads from it. NOTE: order matters — any local bound before this
    # call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_scheduled_job_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_scheduled_job_status`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_scheduled_job_status`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_scheduled_job_status`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}/status'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type` (the three Kubernetes patch media types)
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP PATCH to the shared API client.
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def read_namespaced_cron_job(self, name, namespace, **kwargs):
    """
    read the specified CronJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.read_namespaced_cron_job(name, namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.read_namespaced_cron_job_with_http_info(
            name, namespace, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.read_namespaced_cron_job_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_cron_job_with_http_info(self, name, namespace, **kwargs):
    """
    read the specified CronJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.read_namespaced_cron_job_with_http_info(name, namespace, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals into one dict; the rest of the method
    # reads from it. NOTE: order matters — any local bound before this
    # call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_cron_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_cron_job`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_cron_job`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'exact' in params:
        query_params['exact'] = params['exact']
    if 'export' in params:
        query_params['export'] = params['export']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP GET to the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def read_namespaced_cron_job_status(self, name, namespace, **kwargs):
    """
    read status of the specified CronJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.read_namespaced_cron_job_status(name, namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.read_namespaced_cron_job_status_with_http_info(
            name, namespace, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.read_namespaced_cron_job_status_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_cron_job_status_with_http_info(self, name, namespace, **kwargs):
    """
    read status of the specified CronJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.read_namespaced_cron_job_status_with_http_info(name, namespace, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals into one dict; the rest of the method
    # reads from it. NOTE: order matters — any local bound before this
    # call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_cron_job_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_cron_job_status`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_cron_job_status`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}/status'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP GET to the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def read_namespaced_scheduled_job(self, name, namespace, **kwargs):
    """
    read the specified ScheduledJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.read_namespaced_scheduled_job(name, namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.read_namespaced_scheduled_job_with_http_info(
            name, namespace, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.read_namespaced_scheduled_job_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_scheduled_job_with_http_info(self, name, namespace, **kwargs):
    """
    read the specified ScheduledJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.read_namespaced_scheduled_job_with_http_info(name, namespace, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals into one dict; the rest of the method
    # reads from it. NOTE: order matters — any local bound before this
    # call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_scheduled_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_scheduled_job`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_scheduled_job`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'exact' in params:
        query_params['exact'] = params['exact']
    if 'export' in params:
        query_params['export'] = params['export']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP GET to the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def read_namespaced_scheduled_job_status(self, name, namespace, **kwargs):
    """
    read status of the specified ScheduledJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.read_namespaced_scheduled_job_status(name, namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.read_namespaced_scheduled_job_status_with_http_info(
            name, namespace, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.read_namespaced_scheduled_job_status_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_scheduled_job_status_with_http_info(self, name, namespace, **kwargs):
    """
    read status of the specified ScheduledJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.read_namespaced_scheduled_job_status_with_http_info(name, namespace, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the ScheduledJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals into one dict; the rest of the method
    # reads from it. NOTE: order matters — any local bound before this
    # call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_scheduled_job_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_scheduled_job_status`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_scheduled_job_status`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}/status'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP GET to the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_namespaced_cron_job(self, name, namespace, body, **kwargs):
    """
    replace the specified CronJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.replace_namespaced_cron_job(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2alpha1CronJob body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.replace_namespaced_cron_job_with_http_info(
            name, namespace, body, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.replace_namespaced_cron_job_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_cron_job_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace the specified CronJob
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.replace_namespaced_cron_job_with_http_info(name, namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2alpha1CronJob body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword this endpoint accepts; used below to reject typos.
    all_params = ['name', 'namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the current locals into one dict; the rest of the method
    # reads from it. NOTE: order matters — any local bound before this
    # call ends up in params.
    params = locals()
    # Fold **kwargs into params, rejecting unknown keyword arguments.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_cron_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_cron_job`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_cron_job`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_cron_job`")
    collection_formats = {}
    # Endpoint template; {name}/{namespace} are supplied via path_params below.
    resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The replacement object is sent as the PUT request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    # Delegate the actual HTTP PUT to the shared API client.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V2alpha1CronJob',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_namespaced_cron_job_status(self, name, namespace, body, **kwargs):
    """
    replace status of the specified CronJob

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.replace_namespaced_cron_job_status(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the CronJob (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2alpha1CronJob body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2alpha1CronJob
        If the method is called asynchronously, returns the request thread.
    """
    # Ask the *_with_http_info variant for just the deserialized data, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.replace_namespaced_cron_job_status_with_http_info(
            name, namespace, body, **kwargs)
    # Synchronous path: block until the response arrives.
    return self.replace_namespaced_cron_job_status_with_http_info(
        name, namespace, body, **kwargs)
    def replace_namespaced_cron_job_status_with_http_info(self, name, namespace, body, **kwargs):
        """
        replace status of the specified CronJob
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.replace_namespaced_cron_job_status_with_http_info(name, namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the CronJob (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V2alpha1CronJob body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V2alpha1CronJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus the client-level pass-through options that
        # every generated method accepts.
        all_params = ['name', 'namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the locals defined so far (self, name, namespace,
        # body, kwargs, all_params) into `params`; kwargs is then validated,
        # flattened into it, and removed.  Statement order is load-bearing:
        # any new local introduced above this call would leak into `params`.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_cron_job_status" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_cron_job_status`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_cron_job_status`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_cron_job_status`")
        collection_formats = {}
        # The '{format}' placeholder replace is legacy swagger-codegen
        # boilerplate; this path contains no '{format}', so it is a no-op.
        resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}/status'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate serialization, auth and HTTP transport to the shared client.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V2alpha1CronJob',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def replace_namespaced_scheduled_job(self, name, namespace, body, **kwargs):
"""
replace the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_scheduled_job(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
return data
    def replace_namespaced_scheduled_job_with_http_info(self, name, namespace, body, **kwargs):
        """
        replace the specified ScheduledJob
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.replace_namespaced_scheduled_job_with_http_info(name, namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the ScheduledJob (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V2alpha1CronJob body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V2alpha1CronJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus the client-level pass-through options that
        # every generated method accepts.
        all_params = ['name', 'namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the locals defined so far (self, name, namespace,
        # body, kwargs, all_params) into `params`; kwargs is then validated,
        # flattened into it, and removed.  Statement order is load-bearing:
        # any new local introduced above this call would leak into `params`.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_scheduled_job" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_scheduled_job`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_scheduled_job`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_scheduled_job`")
        collection_formats = {}
        # The '{format}' placeholder replace is legacy swagger-codegen
        # boilerplate; this path contains no '{format}', so it is a no-op.
        resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate serialization, auth and HTTP transport to the shared client.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V2alpha1CronJob',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def replace_namespaced_scheduled_job_status(self, name, namespace, body, **kwargs):
"""
replace status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_scheduled_job_status(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_scheduled_job_status_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_scheduled_job_status_with_http_info(name, namespace, body, **kwargs)
return data
    def replace_namespaced_scheduled_job_status_with_http_info(self, name, namespace, body, **kwargs):
        """
        replace status of the specified ScheduledJob
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.replace_namespaced_scheduled_job_status_with_http_info(name, namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the ScheduledJob (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V2alpha1CronJob body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V2alpha1CronJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus the client-level pass-through options that
        # every generated method accepts.
        all_params = ['name', 'namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the locals defined so far (self, name, namespace,
        # body, kwargs, all_params) into `params`; kwargs is then validated,
        # flattened into it, and removed.  Statement order is load-bearing:
        # any new local introduced above this call would leak into `params`.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_scheduled_job_status" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_scheduled_job_status`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_scheduled_job_status`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_scheduled_job_status`")
        collection_formats = {}
        # The '{format}' placeholder replace is legacy swagger-codegen
        # boilerplate; this path contains no '{format}', so it is a no-op.
        resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}/status'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate serialization, auth and HTTP transport to the shared client.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V2alpha1CronJob',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| 51.396331
| 457
| 0.613137
| 16,123
| 148,484
| 5.461018
| 0.019661
| 0.041795
| 0.018149
| 0.018808
| 0.993799
| 0.992709
| 0.99172
| 0.989472
| 0.987779
| 0.987336
| 0
| 0.002186
| 0.306686
| 148,484
| 2,888
| 458
| 51.414127
| 0.853098
| 0.379314
| 0
| 0.859877
| 1
| 0
| 0.228814
| 0.081923
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032126
| false
| 0
| 0.004785
| 0
| 0.084757
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b4237054c2a4c3bff9f719bd0b821c206581a106
| 30,641
|
py
|
Python
|
api/test_account.py
|
robpop/explore_engine
|
21942c73b28b7b22351877b9772773dfdf298667
|
[
"MIT"
] | null | null | null |
api/test_account.py
|
robpop/explore_engine
|
21942c73b28b7b22351877b9772773dfdf298667
|
[
"MIT"
] | null | null | null |
api/test_account.py
|
robpop/explore_engine
|
21942c73b28b7b22351877b9772773dfdf298667
|
[
"MIT"
] | null | null | null |
from django.urls import reverse
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase, APIClient
from django.contrib.auth.models import User
from account.models import Profile
class AccountTests(APITestCase):
def setUp(self):
self.superuser = User.objects.create_user(username='unsift', email='unsift1@mailinator.com', password='BlueGreen123!')
self.account = User.objects.create_user(username='test', email='unsift2@mailinator.com', password='BlueGreen123!')
self.verified_email_account = User.objects.create_user(username='verified', email='unsift4@mailinator.com', password='BlueGreen123!')
self.superuser_token = Token.objects.get(user__username='unsift')
self.account_token = Token.objects.get(user__username='test')
self.verified_email_account_token = Token.objects.get(user__username='verified')
verified_profile = Profile.objects.get(account=self.verified_email_account)
verified_profile.emailVerified = True
verified_profile.save()
def test_api_root(self):
url = reverse('api:api-root')
data = {}
response = self.client.get(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# username-check tests
def test_username_check(self):
url = reverse('api:username-check')
data = {
'username': 'available'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, 'Username is available')
def test_username_check_without_username(self):
url = reverse('api:username-check')
data = {}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {'username': ['This field is required']})
def test_username_check_with_blank_username(self):
url = reverse('api:username-check')
data = {
'username': ''
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {'username': ['This field must not be blank']})
def test_username_check_with_taken_username(self):
url = reverse('api:username-check')
data = {
'username': 'abcABC123.-_!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, 'Username is not valid')
# sign-up tests
def test_signup(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift5@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data, {
"username": "available",
"email": "unsift5@mailinator.com"
})
def test_signup_without_username(self):
url = reverse('api:sign-up')
data = {
'email': 'unsift5@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {'username': ['This field is required.']})
def test_signup_with_invalid_username(self):
url = reverse('api:sign-up')
data = {
'username': 'abcABC123.-_!',
'email': 'unsift5@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'username': ['This value does not match the required pattern.']
})
def test_signup_with_short_username(self):
url = reverse('api:sign-up')
data = {
'username': 'abc',
'email': 'unsift5@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'username': ['Ensure this field has at least 4 characters.']
})
def test_signup_with_long_username(self):
url = reverse('api:sign-up')
data = {
'username': 'abcdefghijklmnopqrstuvwxyzabcde',
'email': 'unsift5@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'username': ['Ensure this field has no more than 30 characters.']
})
def test_signup_with_taken_username(self):
url = reverse('api:sign-up')
data = {
'username': 'test',
'email': 'unsift5@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'username': ['This field must be unique.']
})
def test_signup_without_email(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'email': ['This field is required.']
})
def test_signup_with_invalid_email(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'invalid',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'email': ['Enter a valid email address.']
})
def test_signup_with_taken_email(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift2@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'email': ['This email is in use by an existing account']
})
def test_signup_without_password(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift5@mailinator.com'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'password': ['This field is required.']
})
def test_signup_with_all_lowercase_common_password(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift5@mailinator.com',
'password': 'abc'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"non_field_errors": [
"This password is too short. It must contain at least 8 characters.",
"This password is too common.",
"The password must contain at least 1 uppercase letter, A-Z.",
"The password must contain at least 1 digit, 0-9.",
"The password must contain at least 1 symbol."
]
})
def test_signup_with_all_uppercase_common_password(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift5@mailinator.com',
'password': 'ABC'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"non_field_errors": [
"This password is too short. It must contain at least 8 characters.",
"This password is too common.",
"The password must contain at least 1 lowercase letter, a-z.",
"The password must contain at least 1 digit, 0-9.",
"The password must contain at least 1 symbol."
]
})
def test_signup_with_similar_password_to_username(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift5@mailinator.com',
'password': 'Available123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"non_field_errors": [
"The password is too similar to the username."
]
})
def test_signup_with_similar_password_to_email(self):
url = reverse('api:sign-up')
data = {
'username': 'available',
'email': 'unsift5@mailinator.com',
'password': 'Unsift5!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"non_field_errors": [
"The password is too similar to the email address."
]
})
# is-2fa-enabled tests
# MANUAL TESTING REQUIRED
# test_is_2fa_enabled_with_2fa_enabled
def test_is_2fa_enabled_without_authentication(self):
url = reverse('api:is-2fa-enabled')
data = {}
response = self.client.get(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response.data, {
"detail": "Authentication credentials were not provided."
})
def test_is_2fa_enabled_with_2fa_disabled(self):
url = reverse('api:is-2fa-enabled')
data = {}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.get(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, False)
# disable-2fa tests
# MANUAL TESTING REQUIRED
# test_disable_2fa_with_2fa_enabled
# test_disable_2fa_with_2fa_enabled_and_without_password
# test_disable_2fa_with_2fa_enabled_and_without_2fa_token
# test_disable_2fa_with_2fa_enabled_and_with_incorrect_length_2fa_token
# test_disable_2fa_with_2fa_enabled_and_with_non_integer_2fa_token
# test_disable_2fa_with_2fa_enabled_and_with_incorrect_2fa_token
# test_disable_2fa_with_2fa_enabled_and_with_incorrect_password
def test_disable_2fa_without_authentication(self):
url = reverse('api:disable-2fa')
data = {
'password': 'BlueGreen123!',
'token': 123456
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response.data, {
"detail": "Authentication credentials were not provided."
})
def test_disable_2fa_with_2fa_disabled(self):
url = reverse('api:disable-2fa')
data = {
'password': 'BlueGreen123!',
'token': 123456
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, 'This account does not have two-factor authentication enabled')
# obtain-api-token tests
# MANUAL TESTING REQUIRED
# test_obtain_api_token_with_2fa_enabled
# test_obtain_api_token_with_2fa_enabled_and_without_2fa_token
# test_obtain_api_token_with_2fa_enabled_and_with_incorrect_length_2fa_token
# test_obtain_api_token_with_2fa_enabled_and_with_non_integer_2fa_token
# test_obtain_api_token_with_2fa_enabled_and_with_incorrect_2fa_token
def test_obtain_api_token(self):
url = reverse('api:obtain-api-token')
data = {
'username': 'test',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, {
'token': self.account_token.key
})
def test_obtain_api_token_with_email_for_username_field(self):
url = reverse('api:obtain-api-token')
data = {
'username': 'unsift2@mailinator.com',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, {
'token': self.account_token.key
})
def test_obtain_api_token_without_username(self):
url = reverse('api:obtain-api-token')
data = {
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"username, password": [
"Both of these fields are required"
]
})
def test_obtain_api_token_without_password(self):
url = reverse('api:obtain-api-token')
data = {
'username': 'test'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"username, password": [
"Both of these fields are required"
]
})
def test_obtain_api_token_with_invalid_username(self):
url = reverse('api:obtain-api-token')
data = {
'username': 'unknown',
'password': 'BlueGreen123!'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
"username": [
"The provided username or email is invalid"
]
})
def test_obtain_api_token_with_incorrect_password(self):
url = reverse('api:obtain-api-token')
data = {
'username': 'test',
'password': 'incorrect'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, 'The provided credentials are invalid')
# change-password tests
def test_change_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': 'GreenBlue123!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, 'Success')
def test_change_password_without_authentication(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': 'GreenBlue123!'
}
response = self.client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response.data, {
'detail': 'Authentication credentials were not provided.'
})
def test_change_password_without_old_password(self):
url = reverse('api:change-password')
data = {
'new_password': 'GreenBlue123!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'old_password': [
'This field is required.'
]
})
def test_change_password_with_incorrect_old_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'incorrect',
'new_password': 'GreenBlue123!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'old_password': [
'This password is incorrect'
]
})
def test_change_password_with_short_old_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'abc123',
'new_password': 'GreenBlue123!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'old_password': [
'Ensure this field has at least 8 characters.'
]
})
def test_change_password_with_long_old_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxy',
'new_password': 'GreenBlue123!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'old_password': [
'Ensure this field has no more than 128 characters.'
]
})
def test_change_password_without_new_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'new_password': [
'This field is required.'
]
})
def test_change_password_with_blank_new_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': ''
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'new_password': [
'This field may not be blank.'
]
})
def test_change_password_with_all_lowercase_new_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': 'abcdefgh'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'new_password': [
"This password is too common.",
"The password must contain at least 1 uppercase letter, A-Z.",
"The password must contain at least 1 digit, 0-9.",
"The password must contain at least 1 symbol."
]
})
def test_change_password_with_all_uppercase_new_password(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': 'ABCDEFGH'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'new_password': [
"This password is too common.",
"The password must contain at least 1 lowercase letter, a-z.",
"The password must contain at least 1 digit, 0-9.",
"The password must contain at least 1 symbol."
]
})
def test_change_password_with_similar_password_to_username(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': 'Unsift123!'
}
# Superuser utilized because username 'test' does not trigger similarity error
token = self.superuser_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'new_password': [
'The password is too similar to the username.'
]
})
def test_change_password_with_similar_password_to_email(self):
url = reverse('api:change-password')
data = {
'old_password': 'BlueGreen123!',
'new_password': 'unsift2X!'
}
token = self.account_token
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = client.put(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {
'new_password': [
'The password is too similar to the email address.'
]
})
# change-email tests
def test_change_email(self):
    """A valid, unused email is accepted for the authenticated account."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    payload = {'email': 'unsift3@mailinator.com'}
    response = client.put(reverse('api:change-email'), payload, format='json')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data, 'Success')
def test_change_email_without_authentication(self):
    """An unauthenticated change-email request is refused with 401."""
    payload = {'email': 'unsift3@mailinator.com'}
    response = self.client.put(reverse('api:change-email'), payload, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    expected = {'detail': 'Authentication credentials were not provided.'}
    self.assertEqual(response.data, expected)
def test_change_email_without_email(self):
    """Omitting the email field entirely yields a required-field error."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    response = client.put(reverse('api:change-email'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(response.data, {'email': ['This field is required.']})
def test_change_email_with_blank_email(self):
    """An empty-string email yields a may-not-be-blank error."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    payload = {'email': ''}
    response = client.put(reverse('api:change-email'), payload, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(response.data, {'email': ['This field may not be blank.']})
def test_change_email_with_invalid_email(self):
    """A malformed address is rejected by the email format validator."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    payload = {'email': 'invalid'}
    response = client.put(reverse('api:change-email'), payload, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(response.data, {'email': ['Enter a valid email address.']})
def test_change_email_with_taken_email(self):
    """An address already owned by another account is rejected."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    payload = {'email': 'unsift1@mailinator.com'}
    response = client.put(reverse('api:change-email'), payload, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    expected = {'email': ['This email is in use by a different account']}
    self.assertEqual(response.data, expected)
# email-verification-status tests
def test_email_verification_status(self):
    """A fresh account reports its email as not yet verified."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    response = client.get(reverse('api:email-verification-status'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data, {'email_verified': False})
def test_email_verification_status_with_email_verified(self):
    """A verified account reports email_verified as True."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.verified_email_account_token.key)
    response = client.get(reverse('api:email-verification-status'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data, {'email_verified': True})
def test_email_verification_status_without_authentication(self):
    """The status endpoint refuses unauthenticated requests with 401."""
    response = self.client.get(reverse('api:email-verification-status'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    expected = {'detail': 'Authentication credentials were not provided.'}
    self.assertEqual(response.data, expected)
# resend-email-verification-link tests
def test_resend_email_verification_link(self):
    """An unverified account may request a fresh verification link."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    response = client.get(reverse('api:resend-email-verification-link'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data, 'Success')
def test_resend_email_verification_link_without_authentication(self):
    """The resend endpoint refuses unauthenticated requests with 401."""
    response = self.client.get(reverse('api:resend-email-verification-link'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    expected = {'detail': 'Authentication credentials were not provided.'}
    self.assertEqual(response.data, expected)
def test_resend_email_verification_link_with_verified_email(self):
    """Requesting a link for an already-verified email is a 400."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.verified_email_account_token.key)
    response = client.get(reverse('api:resend-email-verification-link'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(response.data, 'Your email address is already verified')
# sign-out-everywhere tests
def test_sign_out_everywhere(self):
    """An authenticated account can invalidate all of its sessions."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION='Token ' + self.account_token.key)
    response = client.get(reverse('api:sign-out-everywhere'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data, 'Success')
def test_sign_out_everywhere_without_authentication(self):
    """The sign-out-everywhere endpoint refuses anonymous callers with 401."""
    response = self.client.get(reverse('api:sign-out-everywhere'), {}, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    expected = {'detail': 'Authentication credentials were not provided.'}
    self.assertEqual(response.data, expected)
| 38.397243
| 160
| 0.615287
| 3,265
| 30,641
| 5.559265
| 0.058193
| 0.090078
| 0.138119
| 0.051512
| 0.9086
| 0.871357
| 0.842323
| 0.824362
| 0.797146
| 0.751033
| 0
| 0.017119
| 0.275578
| 30,641
| 798
| 161
| 38.397243
| 0.800604
| 0.037075
| 0
| 0.705449
| 0
| 0
| 0.206527
| 0.028326
| 0
| 0
| 0
| 0
| 0.16053
| 1
| 0.082474
| false
| 0.164948
| 0.008837
| 0
| 0.092784
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b4605a8e533d782d473e8cec2cc8c0143e4a5348
| 278
|
py
|
Python
|
2021-10-Python-SEA-ENGRSL-Workshops/Workshop_1-Hello-Python/code/s07a_s3.py
|
shawnduong/manimations
|
1d36d9d1e7dff90a1a8da1e687ef442f750e29c5
|
[
"MIT"
] | null | null | null |
2021-10-Python-SEA-ENGRSL-Workshops/Workshop_1-Hello-Python/code/s07a_s3.py
|
shawnduong/manimations
|
1d36d9d1e7dff90a1a8da1e687ef442f750e29c5
|
[
"MIT"
] | null | null | null |
2021-10-Python-SEA-ENGRSL-Workshops/Workshop_1-Hello-Python/code/s07a_s3.py
|
shawnduong/manimations
|
1d36d9d1e7dff90a1a8da1e687ef442f750e29c5
|
[
"MIT"
] | null | null | null |
# Check the first four integers (0 through 3) and report each multiple of 3
# below 1000 — same four unrolled checks as before, expressed as a loop.
x = 0
for _ in range(4):
    if x < 1000 and x % 3 == 0:
        print(x, "is a multiple of 3!")
    x += 1
| 19.857143
| 32
| 0.510791
| 66
| 278
| 2.151515
| 0.181818
| 0.084507
| 0.197183
| 0.28169
| 0.985915
| 0.985915
| 0.985915
| 0.985915
| 0.985915
| 0.985915
| 0
| 0.169231
| 0.298561
| 278
| 13
| 33
| 21.384615
| 0.558974
| 0
| 0
| 0.923077
| 0
| 0
| 0.273381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.307692
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
81ec802118192bbd10eb0c5030f92407e34b351e
| 5,043
|
py
|
Python
|
objects/CSCG/_3d/mesh/elements/coordinate_transformation/quad_3d.py
|
mathischeap/mifem
|
3242e253fb01ca205a76568eaac7bbdb99e3f059
|
[
"MIT"
] | 1
|
2020-10-14T12:48:35.000Z
|
2020-10-14T12:48:35.000Z
|
objects/CSCG/_3d/mesh/elements/coordinate_transformation/quad_3d.py
|
mathischeap/mifem
|
3242e253fb01ca205a76568eaac7bbdb99e3f059
|
[
"MIT"
] | null | null | null |
objects/CSCG/_3d/mesh/elements/coordinate_transformation/quad_3d.py
|
mathischeap/mifem
|
3242e253fb01ca205a76568eaac7bbdb99e3f059
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
from screws.quadrature import Quadrature
from screws.freeze.base import FrozenOnly
from objects.CSCG._3d.mesh.elements.coordinate_transformation.helpers.value_cache import \
ElementsCTValuesCache
class _3dCSCG_ECT_3d_QUAD(FrozenOnly):
    """Coordinate-transformation evaluations for 3-D CSCG mesh elements on a
    3-D tensor-product quadrature grid.

    Each public method evaluates one CT quantity (mapping, a Jacobian entry,
    a metric, ...) at the quadrature nodes of the given degree/type and
    returns an ``ElementsCTValuesCache`` over all elements.  The original
    implementation repeated the same two-line body in every method; the
    shared path now lives in ``___ct_values___`` (behavior unchanged — each
    method forwards the exact same arguments as before).
    """

    def __init__(self, ect):
        # Keep a reference to the elements this coordinate transformation
        # belongs to, then freeze (FrozenOnly forbids new attributes).
        self._elements_ = ect._elements_
        self._freeze_self_()

    @staticmethod
    def ___compute_xietasigma___(quad_degree, quad_type):
        """Return the 3-D quadrature grid (xi, eta, sigma) as meshgrid arrays."""
        quad_nodes = Quadrature(quad_degree, category=quad_type).quad[0]
        # 'ij' indexing keeps axis order (xi, eta, sigma) rather than numpy's
        # default cartesian ('xy') ordering.
        xi, eta, sigma = np.meshgrid(*quad_nodes, indexing='ij')
        return xi, eta, sigma

    def ___ct_values___(self, name, quad_degree, quad_type, **kwargs):
        """Shared path: evaluate CT quantity ``name`` on the quadrature grid.

        ``kwargs`` is forwarded verbatim so callers that previously passed
        ``intermediateData=None`` still do, and those that did not still
        rely on the cache's own default.
        """
        xi, eta, sigma = self.___compute_xietasigma___(quad_degree, quad_type)
        return ElementsCTValuesCache(self._elements_, name, xi, eta, sigma, **kwargs)

    def mapping(self, quad_degree, quad_type):
        return self.___ct_values___('mapping', quad_degree, quad_type)

    def X(self, quad_degree, quad_type):
        return self.___ct_values___('X', quad_degree, quad_type)

    def Y(self, quad_degree, quad_type):
        return self.___ct_values___('Y', quad_degree, quad_type)

    def Z(self, quad_degree, quad_type):
        return self.___ct_values___('Z', quad_degree, quad_type)

    def Jacobian_matrix(self, quad_degree, quad_type):
        return self.___ct_values___('Jacobian_matrix', quad_degree, quad_type)

    def J00(self, quad_degree, quad_type):
        return self.___ct_values___('J00', quad_degree, quad_type)

    def J01(self, quad_degree, quad_type):
        return self.___ct_values___('J01', quad_degree, quad_type)

    def J02(self, quad_degree, quad_type):
        return self.___ct_values___('J02', quad_degree, quad_type)

    def J10(self, quad_degree, quad_type):
        return self.___ct_values___('J10', quad_degree, quad_type)

    def J11(self, quad_degree, quad_type):
        return self.___ct_values___('J11', quad_degree, quad_type)

    def J12(self, quad_degree, quad_type):
        return self.___ct_values___('J12', quad_degree, quad_type)

    def J20(self, quad_degree, quad_type):
        return self.___ct_values___('J20', quad_degree, quad_type)

    def J21(self, quad_degree, quad_type):
        return self.___ct_values___('J21', quad_degree, quad_type)

    def J22(self, quad_degree, quad_type):
        return self.___ct_values___('J22', quad_degree, quad_type)

    def Jacobian(self, quad_degree, quad_type):
        return self.___ct_values___('Jacobian', quad_degree, quad_type,
                                    intermediateData=None)

    def metric(self, quad_degree, quad_type):
        """g := det(G) = Jacobian ** 2."""
        return self.___ct_values___('metric', quad_degree, quad_type,
                                    intermediateData=None)

    def metric_matrix(self, quad_degree, quad_type):
        """G, g_{i,j}."""
        return self.___ct_values___('metric_matrix', quad_degree, quad_type,
                                    intermediateData=None)

    def inverse_Jacobian_matrix(self, quad_degree, quad_type):
        return self.___ct_values___('inverse_Jacobian_matrix', quad_degree,
                                    quad_type, intermediateData=None)

    def inverse_Jacobian(self, quad_degree, quad_type):
        return self.___ct_values___('inverse_Jacobian', quad_degree, quad_type,
                                    intermediateData=None)

    def inverse_metric_matrix(self, quad_degree, quad_type):
        """G^-1, g^{i,j}."""
        return self.___ct_values___('inverse_metric_matrix', quad_degree,
                                    quad_type, intermediateData=None)
| 57.306818
| 119
| 0.719215
| 621
| 5,043
| 5.341385
| 0.114332
| 0.12662
| 0.12662
| 0.22249
| 0.803437
| 0.790775
| 0.773289
| 0.726862
| 0.705758
| 0.678022
| 0
| 0.010342
| 0.175491
| 5,043
| 88
| 120
| 57.306818
| 0.787398
| 0.015467
| 0
| 0.266667
| 0
| 0
| 0.028485
| 0.008889
| 0
| 0
| 0
| 0
| 0
| 1
| 0.293333
| false
| 0
| 0.053333
| 0
| 0.64
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
c327034b61c98a4221372202a95ced76cb0216da
| 5,390
|
py
|
Python
|
tests/components/zwave_js/test_cover.py
|
miccico/core
|
14c205384171dee59c1a908f8449f9864778b2dc
|
[
"Apache-2.0"
] | 3
|
2021-04-27T16:37:48.000Z
|
2022-02-23T02:47:33.000Z
|
tests/components/zwave_js/test_cover.py
|
miccico/core
|
14c205384171dee59c1a908f8449f9864778b2dc
|
[
"Apache-2.0"
] | 48
|
2019-02-06T22:08:09.000Z
|
2022-03-31T06:02:22.000Z
|
tests/components/zwave_js/test_cover.py
|
miccico/core
|
14c205384171dee59c1a908f8449f9864778b2dc
|
[
"Apache-2.0"
] | 4
|
2019-02-04T15:56:36.000Z
|
2020-12-03T02:03:45.000Z
|
"""Test the Z-Wave JS cover platform."""
from zwave_js_server.event import Event
from homeassistant.components.cover import ATTR_CURRENT_POSITION
WINDOW_COVER_ENTITY = "cover.zws_12_current_value"
async def test_cover(hass, client, chain_actuator_zws12, integration):
    """Test the cover entity (set position, open, close, state updates)."""
    node = chain_actuator_zws12
    state = hass.states.get(WINDOW_COVER_ENTITY)
    assert state
    assert state.state == "closed"
    assert state.attributes[ATTR_CURRENT_POSITION] == 0

    def assert_set_value_command(expected_value):
        """Assert exactly one node.set_value command was sent with the
        expected target value, then reset the mock for the next step.

        NOTE: the original expected dict repeated the "label" key inside
        "metadata"; a Python dict literal keeps only the last occurrence,
        so the single entry here compares identically.
        """
        assert len(client.async_send_command.call_args_list) == 1
        args = client.async_send_command.call_args[0][0]
        assert args["command"] == "node.set_value"
        assert args["nodeId"] == 6
        assert args["valueId"] == {
            "commandClassName": "Multilevel Switch",
            "commandClass": 38,
            "endpoint": 0,
            "property": "targetValue",
            "propertyName": "targetValue",
            "metadata": {
                "label": "Target value",
                "max": 99,
                "min": 0,
                "type": "number",
                "readable": True,
                "writeable": True,
            },
        }
        assert args["value"] == expected_value
        client.async_send_command.reset_mock()

    def current_value_event(new_value):
        """Build a driver 'value updated' event for currentValue."""
        return Event(
            type="value updated",
            data={
                "source": "node",
                "event": "value updated",
                "nodeId": 6,
                "args": {
                    "commandClassName": "Multilevel Switch",
                    "commandClass": 38,
                    "endpoint": 0,
                    "property": "currentValue",
                    "newValue": new_value,
                    "prevValue": 0,
                    "propertyName": "currentValue",
                },
            },
        )

    # Test setting position
    await hass.services.async_call(
        "cover",
        "set_cover_position",
        {"entity_id": WINDOW_COVER_ENTITY, "position": 50},
        blocking=True,
    )
    assert_set_value_command(50)

    # Test setting position
    await hass.services.async_call(
        "cover",
        "set_cover_position",
        {"entity_id": WINDOW_COVER_ENTITY, "position": 0},
        blocking=True,
    )
    assert_set_value_command(0)

    # Test opening
    await hass.services.async_call(
        "cover",
        "open_cover",
        {"entity_id": WINDOW_COVER_ENTITY},
        blocking=True,
    )
    assert_set_value_command(99)

    # Test position update from value updated event
    node.receive_event(current_value_event(99))
    state = hass.states.get(WINDOW_COVER_ENTITY)
    assert state.state == "open"

    # Test closing
    await hass.services.async_call(
        "cover",
        "close_cover",
        {"entity_id": WINDOW_COVER_ENTITY},
        blocking=True,
    )
    assert_set_value_command(0)

    node.receive_event(current_value_event(0))
    state = hass.states.get(WINDOW_COVER_ENTITY)
    assert state.state == "closed"
| 28.368421
| 70
| 0.538961
| 519
| 5,390
| 5.423892
| 0.17341
| 0.056838
| 0.063943
| 0.093783
| 0.85897
| 0.85897
| 0.836945
| 0.812078
| 0.812078
| 0.795737
| 0
| 0.018946
| 0.324304
| 5,390
| 189
| 71
| 28.518519
| 0.753981
| 0.028015
| 0
| 0.78882
| 0
| 0
| 0.257066
| 0.004999
| 0
| 0
| 0
| 0
| 0.15528
| 1
| 0
| false
| 0
| 0.012422
| 0
| 0.012422
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c357ddbd1bd11e1f3e84e355c57bdbca8442dd84
| 23,220
|
py
|
Python
|
exif/tests/modify_exif_baselines/modify_rational_hex.py
|
chbndrhnns/exif
|
65aa2d8bcdecf79d34752390310222a9bd5d5bb3
|
[
"MIT"
] | null | null | null |
exif/tests/modify_exif_baselines/modify_rational_hex.py
|
chbndrhnns/exif
|
65aa2d8bcdecf79d34752390310222a9bd5d5bb3
|
[
"MIT"
] | null | null | null |
exif/tests/modify_exif_baselines/modify_rational_hex.py
|
chbndrhnns/exif
|
65aa2d8bcdecf79d34752390310222a9bd5d5bb3
|
[
"MIT"
] | null | null | null |
"""APP1 segment hexadecimal baseline for test_modify_rational."""
from baseline import Baseline
MODIFY_RATIONAL_HEX_BASELINE = Baseline("""
FFE12AAE4578696600004D4D002A00000008000C010F000200000006000008B60110000200000009000008BC01
1200030000000100010000011A000500000001000008C6011B000500000001000008CE01280003000000010002
00000131000200000007000008D60132000200000014000008DE02130003000000010001000087690004000000
01000008F288250004000000010000171AEA1C0007000008180000009E000018AC1CEA00000008000001000000
521800005A00000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000004170706C65006950686F
6E65203700000000004800000001000000480000000131312E322E360000323031383A30333A31322031303A31
323A3037000022829A0005000000010000129C829D000500000001000012A48822000300000001000200008827
000300000001001400009000000700000004303232319003000200000014000012AC9004000200000014000012
C09101000700000004010203009201000A00000001000012D49202000500000001000012DC9203000A00000001
000012E49204000A00000001000012EC920700030000000100050000920900030000000100100000920A000500
000001000012F49214000300000004000012FC927C0007000003CE000013049291000200000004353532009292
00020000000435353200A00000070000000430313030A001000300000001FFFF0000A00200040000000100000F
C0A00300040000000100000BD0A21700030000000100020000A30100070000000101000000A402000300000001
00000000A40300030000000100000000A405000300000001001C0000A40600030000000100000000A432000500
000004000016D2A433000200000006000016F2A434000200000022000016F8EA1C00070000080C00000A90EA1D
00090000000100001048000000001CEA0000000800000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000100000E6A0000000900000005323031383A30333A31322031303A31323A303700323031383A
30333A31322031303A31323A30370000007394000009C10000086F000004F900002A43000003B8000000000000
00010000018F0000006407DF05E708A905324170706C6520694F530000014D4D00120001000900000001000000
09000200070000022E000000EC00030007000000680000031A0004000900000001000000010005000900000001
000000C80006000900000001000000D40007000900000001000000010008000A0000000300000382000C000A00
0000020000039A000D00090000000100000033000E000900000001000000040010000900000001000000010014
00090000000100000001001600020000001D000003AA0017000900000001000000000019000900000001000000
00001A000200000006000003C8001F000900000001000000000000000062706C69737430304F1102007D017801
6B015B015601590168017A017E0179017D01800182017201650160018D01960192018F018F0190019201900197
019A01A701A7019A01830169016601810189019701A701B401BC01B701AE01AE01AB01A001930183017E017E01
7B0188019601A301B701C301CD01D001CE01CA01C201B801AC01A401A101A2019901CE00E700E800F0001F0140
016C018B018E0178016C0154014F0158015B0163018A00A600B8009200A100A600D1000201F900E000D900D700
D600C100B4004E005C008C00B900B8009500B500C700C400E200F400EC00E000D000C500940027008500B20000
01C2007C00A900A700A000B700C700BD00B500C100B00050002A007C006400C800AC00950096009F009700AF00
D100C400B100A700A600710026007F00720086009300A00090009800A800AD00D800DE00BC00B800C200810025
006B009B006A00890091009C009B00B600AE00D100E000B900AE00990057007C0071006C00730071009E00A300
A000AF00B400CE00E200A700A500A7006E00410050003F00720077009000950099009600E400B600C800AC00B7
00B30049003A0022002A00410057006E008A009D00A900A600B800A400AC009A006D005000300063005C005600
57005E006F006E0097007C006B0058005200520059004E0033007900760086006B0065005F005F005800520050
00480043003900300029001D000008000000000000020100000000000000010000000000000000000000000000
020C62706C6973743030D4010203040506070855666C6167735576616C75655974696D657363616C655565706F
63681001130001668F2C8EB8B9123B9ACA0010000811171D272D2F383D00000000000001010000000000000009
0000000000000000000000000000003FFFFFF58900000A940000023400008599FFFFF986000026630000008F00
0001000000000F000000404163303663356D51374C7777796E6269744D465034516D304A717058000071383235
73000000018F000000640000018F00000064000000090000000500000009000000054170706C65006950686F6E
652037206261636B2063616D65726120332E39396D6D20662F312E3800000F00010002000000024E0000000002
000500000003000017D40003000200000002570000000004000500000003000017EC0005000100000001000000
0000060005000000010000180400070005000000030000180C000C0002000000024B000000000D000500000001
0000182400100002000000024D00000000110005000000010000182C00170002000000024D0000000018000500
00000100001834001D00020000000B0000183C001F000500000001000018480000000000000029000000010000
002400000001000041fd000001f400000070000000010000000500000001000001A200000064075bcd15000f42
4000000011000000010000000C00000001000000070000000100000000000000010000C24D000000AB0000C24D
000000AB323031383A30333A3132000000000005000000010000530061006D0070006C00650020007500730065
007200200063006F006D006D0065006E0074002000730065007400200069006E002000570069006E0064006F00
7700730020004500780070006C006F007200650072002E0000000006010300030000000100060000011A000500
000001000018FA011B0005000000010000190201280003000000010002000002010004000000010000190A0202
0004000000010000119B0000000000000048000000010000004800000001FFD8FFDB0043000806060706050807
07070909080A0C140D0C0B0B0C1912130F141D1A1F1E1D1A1C1C20242E2720222C231C1C2837292C3031343434
1F27393D38323C2E333432FFDB0043010909090C0B0C180D0D1832211C21323232323232323232323232323232
3232323232323232323232323232323232323232323232323232323232323232323232FFC0001108007800A003
012100021101031101FFC4001F0000010501010101010100000000000000000102030405060708090A0BFFC400
B5100002010303020403050504040000017D01020300041105122131410613516107227114328191A1082342B1
C11552D1F02433627282090A161718191A25262728292A3435363738393A434445464748494A53545556575859
5A636465666768696A737475767778797A838485868788898A92939495969798999AA2A3A4A5A6A7A8A9AAB2B3
B4B5B6B7B8B9BAC2C3C4C5C6C7C8C9CAD2D3D4D5D6D7D8D9DAE1E2E3E4E5E6E7E8E9EAF1F2F3F4F5F6F7F8F9FA
FFC4001F0100030101010101010101010000000000000102030405060708090A0BFFC400B51100020102040403
040705040400010277000102031104052131061241510761711322328108144291A1B1C109233352F0156272D1
0A162434E125F11718191A262728292A35363738393A434445464748494A535455565758595A63646566676869
6A737475767778797A82838485868788898A92939495969798999AA2A3A4A5A6A7A8A9AAB2B3B4B5B6B7B8B9BA
C2C3C4C5C6C7C8C9CAD2D3D4D5D6D7D8D9DAE2E3E4E5E6E7E8E9EAF2F3F4F5F6F7F8F9FAFFDA000C0301000211
0311003F00F4D62477A61DE7A0AE8218D60DDC530838AA44913A16A88C5571131318E050013C55137265898F43
52080D4B00F2CE7935221DA793536289D5D48EB8A5DEBEB486481C74A717A8655C89C82DEF49D2A1B28439A633
91C534264A5778E952221DB5420F2BE6E94C922217814D3115DA2623EB4DFB3BF5AB892C6F90C7B5385B8EE1AA
EE40E5800ECD5208FD8D26343BCA53D452FD9626EA48A9B8C78B24EC4D3BEC8807534AE507938F5A694A963216
041A371E959B45A61F31ED4C68DFD28481B27008E94E0E4569633B8F129A78901A2C3E61C369F4A3621A109EA2
F94B9E9479429DC431A21EA69BE5AAF5DD54992C50A9D8B5218C7F78D0314023F8A9C5914659C0FC690C69B88F
FBD9A619A33D2972B0E6431896E805342CB9FE1A7CA2E60DB2E7EF63E94C6593BEE34EC1726478BA17FCEA4CC2
7F8E33F46ACEE55850887A11F9D2F9433C114D489B1208E9C10D1701C10D2EDF5A2E171A507A530C04F6FD6A93
21879440FF00EBD06227DAAEE8571A62FF0068D34C01BAFF002A2E8433ECABEA7F2A5FB27A1A3990598F5B723D
E9E2203AD4B65A42796334BB7B62A6E5D8F244D77553A9476C3C43235ADC0262B82AA08E3A118C8E7835A316A9
AAA3431C7AC4933B93BD4C4994C6383C75E6B85D668EB54917F52D6757D39616FB64325B95FDE3181728D9E071
EBFCEA01AEEB8F710C467B7D9228652221D093FE142AFA5C4E8D997175BD6D3516B43736E51977472A460F1EA4
67A75A2D7C59A879B109EE2128F2795FBB84925B9E9CE29FD618BD81A49E25BA17CB6BBEDB79E798D81DA4E01E
B52DBF8B83ED12C7192C485D84F38EBC1EF550AE9BB326542CAE5E5F1245C661EBD39EB53AEBF6E4730B8FD457
46A63ECC0EBD687A21A85FC456E8706239AA4D8BD931ADE24B70B91093FF0002A8CF89A26CECB7C81C93BBFF00
AD436C152645FF00096C3FF3EDFF008FFF00F5AA45F165901FBC8DD7E9CE7E94AEC7ECD8F3E28B151BA4495140
CF2B8E3F3A587C61A2C92044171231EB84E052E6635043A2F1568B3CEF0A5C0DEADB4A8E704F6AE0FE2778D26B
28ADF4FD2AF961924CB4E50E1F6F619ED9ACDCDD8AE43CF3518DADEED9AD24061572D137B1E40AD8B2F105EC77
12DEC4E8B3C8A3721E5588EFEC6B99AE64742972B35A7D78DFD8FEF2CF209DD2E1F001041E9E99AB31EB73CB73
0CD12F973041167A8DBDB8AC542C8D9D4BEB60BABCBA8EFE2BE8932550C4D1AE4719EBC56644D7D6D21281A581
6E44F14722F1CE4E33E9D6AA29244C9DCD8B6BBB8BD0D7102AC130F9786C8E3B1AA834F9A69A65777478BF7842
74E7924134A368BD4B6B996874367F6816ABE74D1B303D739FCCFAD5E798346010A7E95D91AAE491CEE0D311D9
F202C642E3A9E6AB4914B260BB1DB9CE0251CECA48688379524B052703E5E685B43920E4FE147336559217C8C2
E7B9E9D3A5412C113AF405873D726AD4999BB1125B426456DF83E841C8FE958DAF6A9A7E99FE8373E7A8963E5A
25C601F7A5298ECAD73CF6DAEDED2F566B63E6847FDD963B771E809A9B5A96F754BE50F06678E2C164F9B701D4
E47519AE7BB7A11D0EBAC9F4CD56C57CAE1C21C24ABB712FA74E47F8554D5746B9B253AA5A244D68177108DF32
81C127D066B28CDA9599D2E09C7991AF6D0C72D85A6B30C8B1C736219222996DC3F8B1EA6BA0B1D19277FB38B9
B86500B26155471D07F9E82A252B0D42E3AEE06D35DE5F3E4CA105F73FF092037F33DEA95C595B412A5B2249E5
B42E325F20B0FBA003D3BD4A95CAE5B18FE15BA5BF54F3549F2E52B276C9231F9735ACF72F63716EB7002ACDBA
3191D5C76CFAE3F91AB92F7AC34D285C279AE07EEB7C7F20DB855E4FA37F2A75BCC194894FCC3A103231569D91
0B565FB6BA037797395CF0411C1AB8F768114C6C58F7F4AB8BD4892219352755C32B951CD402FC5CCAA8AA4961
DCE2B548CDB15BFBC3208A63B795CB6066A9489B75239B5036D0B4E662628D7EE94CD79A789F567D6B54596244
0C80C4D923181DF8ACE654B44607971C633BDB79C93B791ED4EB69DED4929330DEBB4FCBC91E99F4359DEC423B
1F0DC693D94F3C985581E3971FC472DB4E3F0ADBB8912CFC37AED94ECC6660C62C8E3613EA3D339ACEA6B2B1D7
4F48DCC1F096B4F6712D84AEBF677903166FF967C8C9FA62BD4A5BCB4648E6B7BA08CAC191860640EBD7DAA311
1D6E8BA0FDDB32BEBB7105E47912EE8BE512EC6C839FFEBD73D7578D1C804B1EF0BB4AFCD91BB1CFF5A8A6B41D
49A4CC8B0D226D2A295D5FCE17126E18E158765FAD6BC0897D1BC5237CC30C03F546FF001F7ADE4EEEE611EC39
0CDE58472B2152473C9C54683F7C4950AA3A11D6A2F735574AE5F8C2A32AEE014F7C66AE1B64057136EC6785E7
354A561B85C64CAF131057E5EA33DC55228A7957DADDB22B78C8E59AB3181AE217C8F9C7A678CD594B92F0EE75
F988C91E98AA62452BA8A1BEB668E41BA2907201C1AF38D674D9F4CBA48188647C94900E71FD2A66AE825B19F2
245B300A927AE1B9350450E7F7AFB5B03EE9E2B24F4219D678684CD74F11DF1C6D0346FDB3E9FAFF005AE92775
9B4EB8B6B80DBCA18C9E00C63191EF4AA2F78EA84BDD3978BC2CF6EFB88B89616FEE1C66B7EDF74AA814FCB1FC
A09E481EF5537CC8C6375A1A31ACA016038917046EC60FB53D563906DBADC8E060638DFEA6B2B9B72B68991A28
ADDED1F6B46AA1941E491FE7FA55298C50CB1ED90FCC081DB03D0FE94AE68A368DD9792DF7480ACACC8E0303E9
486D0C6C4E582B9E4F520D245CB5893C40ACA10B86EC3E6E95ACB6B25B60B1407B1DBD6AAE4AB966E1629ADB33
1D8CBC8ACAC20FBC9F8D6907A18545EF0E06D723E66E9CE56A44F2E4521539C7AD55D8972897964F17EF1D02EE
51C0ED5E73E2DB3B97759E36325B2AE18A72067BD3E6BA14D6871D1A797F3A637FBF6A895A5123939C7F111D05
2B5D189E9335AC8FFBD8F70727BF5E2A24BC7926F2EE41126DE3D08FA8A95A9A6C6FE9F786187ECECD1B392405
0DF3271593796C2DAE8DC412C8E31B8E3921BBFD41FD2B3BD9D8DA30725724B2D5ADAE60DA0347708E5704D5F6
B69AEE131ABAAE40CEE18E47BD4B5666F092686A5AEF292BB6F70BB644E98C76C7A55A6B0475DDB5430FBA7B8C
F5A1B1C55D6A24402CA14C9F2A9E4633D6B41D15A168CC8E848EDD4D260B6B10A0DE8227E594803D4D69C33452
DA491167FB4AF004849A193768BBB19AD76CB01638EA3922B258A465D2446DBD88EB5A45984F72033400E442E1
71EB9A789D3811AF38EFDAAEEC956395F165FDF5B6A325D24A515EDC432153CE3E9FE7A537C2D6CCF629119209
63619C4873B41EA3D7142D10377651F1268DA614315ADDD9C77C18858D1B6AFBE49EF5E76EB35B4EF0BB725B07
0723F3AA46535667AC5A6A304F691ED5C955DB96393F9D0F344B2A2B58A9427693EBF4AC9DD33A29F2BDCA973A
A47A46A51B95C59BC837E002573EBED56ACA469D4BA3A00AC70430E454CA375735A6D29588AE34C2F76F74872C
E32C3BE474C1AD6D3269248B0EFF00BE1C30239FAD36EE8495A5A7516F25166B2DCA12D201B1940CEEEF8E2AF4
047D8B2932371D76139CFA566F6358BE856B9082691A791F682369276A8ED8C0EB57AD583811C849623839E0D3
E82D2E4AF652372A8778E33ED56062C1C3120C9FC64741410D5E562F096710865F99FF008431E48F5AC6BE9669
2E0BB28563C7B538EFA18CC846E270B183EF56634F293E75DAC4FE35A5C945492DAD24984F2DB6E2083B98640C
5702DE20483509ECF4AB74801999CB87E1B1D319E00EBC53429191A95C47A94EF752DA80F9C4CE0F04FB554D96
FE53C6D1A14C6432F0463B668BD8876BDC726A52DA31F2DDB07923AE6AF43E20B95DA73BD070BBCE48AD65052D
4A8CF958C935192EA17494EF571B5862B574BD46C2DE2F2AE639106DDAAC8723F1153CB65629CD377359AF2236
CB7105C48501E71DBEA29C352468F7C0AE1C756CF247707159246CDEBA1B51CC92DB70EA02E19DBAE011566C1E
25DB68ADBA36188994F51D7AFA566CA4F528EB70B4D7B6C5CFEE900E9C9E3D6AC4C970DF3DA2962CC0F3D3F0AA
BE88977BB3A4D3AF546F8A63CC4A158B71824567DDA99E7768DFF765CF23B5677F7AC6B1568DC5D46EAE74EB10
D1C41CA0E99E40A8ACF65FC02E60732AB7DE42BCA9EF5A2DAE8E67BD87AC652605D01DDF2938C63DA9D75770DA
283B09C753E9F9D31232755D694E9773C88E10BB5801B99811E95E6F65A3C124C8D34D8B69321E51FF002CC900
8527A679A7160E3B047716F697725B5A4524B2676A9FBCB20FF77D7E954A795DDDD2487CA60DCAEDDA57D88AAB
194AC5B1E18BE99433BC3181D72D923F2ABB0F85191496BC8CB0E891A1626B5F68914E04D2F868C16ED3B5D2A3
0C7EE9930CD9F4A7C3A6DBC11F9734A5DDBF8106063EA6A5CD0D40B42CECA384C6B113DCEE72067B7152405215
2A90AA8EBF7EA1BB9A2562D5ABB6640D2431C6DD438E0E3E9579648B4F58CC096F142B92A236270DD7BF6A9B0E
F61F15DC7773AA4F3208F3B896FE1F502B7DA6D3628112CEE220AC32E03E76FF008544AE8B8ABBB98B25D4114F
79F6BB978CCB2F41CFCA30294F886CAC900B48A4B84071BA43B4367D075A396E8A94FA10DEF89A69A1D8218D93
A3277FCEAA41E22B9B65DB1421727F8063354A36462F7B90DB6BDA879CF9732AE7288C8495CFD2ABA5C1BB9253
35CCC8F236E393800D532513A258A4322DCDF328C60007EF7E7D8D6496D2E3B296D22491D5CFCC431009EC7DE9
265348C1D461164B09B676F3836448A307DB9F51594D7134B334B2CACCCFF333B9C963DEB45A9CF2763D154BEE
52DB377FB200AD5B5413B9489897519E3BFE559E86E437BA6DD4F7AA15582A8CE6450D9FCFF9D360D0E18A454B
8762C464765FA669DD0AE25D695106020408ABF78BC84835545B44CEC5A445F719EBEC2828ACE2276E5F001C72
0922836C819774C42F7F93A0A004898046F318C6A3804AE770F5A8CC91E095C7F8FD69E8086ACC4925893EC286
951B6952430E99A5E4221F346FCEF393C6693CD9438712B1C7BD30236BCB8030934807A06AAB25C4A1BEF13CFE
74C4466E0B100FE154E4BC9A366404ED1DF1C504B653925775FBE724E7AD40E4107232A7D29A3167A4B58DEAC3
B8D94A8A3D45751A4409A6D8EC65CCCDF34A476F6AC99D45B7BE8B0498C9C74C8EB59BAA4F7135BE523F2A31C9
2ADF352123989A73C8C64679C9CD406E1C740BC0C62A8A18A776E053767F0A4DE563033F5E698884C819B6F5EF
4AA081D57A6706810E0032676E1B1C73C9A6F9323B280986C725CE050030584F904CB1853DCBE7F0E2ADFF0063
39507ED90EEECB834C4324D0E5C645CC7C9EC0D569B45911C0F30153C6403C534C452B986DAD0E24999B8CFC8B
D6AB4DF66B940209B6F192B21C66992D993C899810383F5A182AA13B8373C76A7E4647FFD9
""")
| 92.88
| 94
| 0.946124
| 260
| 23,220
| 84.476923
| 0.638462
| 0.712985
| 1.044892
| 1.360408
| 0.364688
| 0.364688
| 0.364688
| 0.364688
| 0.364688
| 0.364688
| 0
| 0.831294
| 0.053187
| 23,220
| 249
| 95
| 93.253012
| 0.167751
| 0.002541
| 0
| 0.361789
| 0
| 0
| 0.996502
| 0.943813
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004065
| 0
| 0.004065
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c35e24368951899848612d510185346d9565729c
| 4,077
|
py
|
Python
|
vq_vae/test_nearest_embed.py
|
christopher-beckham/VQ-VAE
|
38f6ae34c202b04d53336f40ca7d6dfb18705ea6
|
[
"BSD-3-Clause"
] | null | null | null |
vq_vae/test_nearest_embed.py
|
christopher-beckham/VQ-VAE
|
38f6ae34c202b04d53336f40ca7d6dfb18705ea6
|
[
"BSD-3-Clause"
] | null | null | null |
vq_vae/test_nearest_embed.py
|
christopher-beckham/VQ-VAE
|
38f6ae34c202b04d53336f40ca7d6dfb18705ea6
|
[
"BSD-3-Clause"
] | 1
|
2022-02-04T04:01:59.000Z
|
2022-02-04T04:01:59.000Z
|
import unittest
import numpy as np
import torch
from torch.autograd import Variable
from vq_vae.nearest_embed import nearest_embed
# class NearestEmbedTest(unittest.TestCase):
# def test_something(self):
#
# emb = Variable(torch.eye(10, 10).double())
# a = np.array(([1,0,0,0,0,0,0,0,0,0],
# [0,1,0,0,0,0,0,0,0,0]), dtype=np.double)
# input = Variable(torch.from_numpy(a))
# z_q = nearest_embed(input, emb, dim=1)
# self.assertEqual(True, torch.equal(z_q.data, input.data))
class NearestEmbed2dTest(unittest.TestCase):
    """Tests for ``nearest_embed`` on 3-D ``(batch, emb_dim, n)`` double tensors.

    Each spatial column of the input should be replaced by its nearest
    codebook vector (rows of a 5x5 identity here), and the straight-through
    estimator should route gradients back to both the input and the codebook.

    Fixed vs. original: use ``assertTrue(...)`` instead of the non-idiomatic
    ``assertEqual(True, ...)`` (clearer intent and better failure messages),
    and rename the local ``input`` which shadowed the builtin.
    """

    def test_something(self):
        """Distinct nearest embeddings: each column maps to a different codebook row."""
        # inputs: codebook is the 5x5 identity, input columns are near-one-hot
        emb = Variable(torch.eye(5, 5).double(), requires_grad=True)
        a = np.array(([[[0.9, 0.0, 0.0, 0.0, 0.0],
                        [0.0, 0.8, 0.0, 0.0, 0.0]],
                       [[0.0, 0.0, 0.7, 0.0, 0.0],
                        [0.0, 0.0, 0.0, 0.6, 0.0]]]), dtype=np.double).reshape((2, 5, 2))
        # expected results: exact one-hot quantizations
        result = np.array(([[[1., 0., 0., 0., 0.],
                             [0., 1., 0., 0., 0.]],
                            [[0., 0., 1., 0., 0.],
                             [0., 0., 0., 1., 0.]]]), dtype=np.double).reshape((2, 5, 2))
        grad_input = np.array(([[[1., 0., 0., 0., 0.],
                                 [0., 1., 0., 0., 0.]],
                                [[0., 0., 1., 0., 0.],
                                 [0., 0., 0., 1., 0.]]]), dtype=np.double).reshape((2, 5, 2))
        grad_emb = np.array(([[1., 0., 0., 0., 0.],
                              [0., 1., 0., 0., 0.],
                              [0., 0., 1., 0., 0.],
                              [0., 0., 0., 1., 0.],
                              [0., 0., 0., 0., 0.]]), dtype=np.double)
        grad_input = torch.from_numpy(grad_input).double()
        grad_emb = torch.from_numpy(grad_emb).double()
        inp = Variable(torch.from_numpy(a).double(), requires_grad=True)
        z_q = nearest_embed(inp, emb)
        # With loss = sum(0.5 * z_q**2), d(loss)/d(z_q) == z_q, so the expected
        # gradients are the quantized outputs routed straight through.
        (0.5 * z_q.pow(2)).sum().backward(retain_graph=True)
        result = torch.from_numpy(result)
        self.assertTrue(torch.equal(z_q.data, result))
        self.assertTrue(torch.equal(inp.grad.data, grad_input))
        self.assertTrue(torch.equal(emb.grad.data, grad_emb))

    def test_multiple_same_embedding(self):
        """Several columns snapping to the same codebook row (row 1)."""
        # inputs: three of the four columns are nearest to embedding index 1
        emb = Variable(torch.eye(5, 5).double(), requires_grad=True)
        a = np.array(([[[0.9, 0.0, 0.0, 0.0, 0.0],
                        [0.0, 0.8, 0.0, 0.0, 0.0]],
                       [[0.0, 0.7, 0.0, 0.0, 0.0],
                        [0.0, 0.6, 0.0, 0.0, 0.0]]]), dtype=np.double).reshape((2, 5, 2))
        # expected results: duplicate one-hot rows for the shared embedding
        result = np.array(([[[1., 0., 0., 0., 0.],
                             [0., 1., 0., 0., 0.]],
                            [[0., 1., 0., 0., 0.],
                             [0., 1., 0., 0., 0.]]]), dtype=np.double).reshape((2, 5, 2))
        grad_input = np.array(([[[1., 0., 0., 0., 0.],
                                 [0., 1., 0., 0., 0.]],
                                [[0., 1., 0., 0., 0.],
                                 [0., 1., 0., 0., 0.]]]), dtype=np.double).reshape((2, 5, 2))
        # NOTE(review): only rows 0 and 1 of the codebook are expected to
        # receive gradient here -- this encodes the implementation's choice of
        # how repeated assignments accumulate; confirm against nearest_embed.
        grad_emb = np.array(([[1., 0., 0., 0., 0.],
                              [0., 1., 0., 0., 0.],
                              [0., 0., 0., 0., 0.],
                              [0., 0., 0., 0., 0.],
                              [0., 0., 0., 0., 0.]]), dtype=np.double)
        grad_input = torch.from_numpy(grad_input).double()
        grad_emb = torch.from_numpy(grad_emb).double()
        inp = Variable(torch.from_numpy(a).double(), requires_grad=True)
        z_q = nearest_embed(inp, emb)
        (0.5 * z_q.pow(2)).sum().backward(retain_graph=True)
        result = torch.from_numpy(result)
        self.assertTrue(torch.equal(z_q.data, result))
        self.assertTrue(torch.equal(inp.grad.data, grad_input))
        self.assertTrue(torch.equal(emb.grad.data, grad_emb))
if __name__ == '__main__':
    # Allow running this test module directly: ``python test_nearest_embed.py``.
    unittest.main()
| 42.030928
| 93
| 0.44052
| 571
| 4,077
| 3.043783
| 0.108581
| 0.188723
| 0.2313
| 0.241657
| 0.857883
| 0.857883
| 0.806099
| 0.789988
| 0.765823
| 0.765247
| 0
| 0.10038
| 0.354918
| 4,077
| 96
| 94
| 42.46875
| 0.560456
| 0.107432
| 0
| 0.75
| 0
| 0
| 0.002207
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 1
| 0.03125
| false
| 0
| 0.078125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c3624a92a2c08dd08348a39bc1989b0783bec274
| 35
|
py
|
Python
|
HelloPython.py
|
Guristasrabbit/HelloGit
|
a4324e08eda0ee0fa8ece6da03eba181efb33d61
|
[
"Apache-2.0"
] | null | null | null |
HelloPython.py
|
Guristasrabbit/HelloGit
|
a4324e08eda0ee0fa8ece6da03eba181efb33d61
|
[
"Apache-2.0"
] | null | null | null |
HelloPython.py
|
Guristasrabbit/HelloGit
|
a4324e08eda0ee0fa8ece6da03eba181efb33d61
|
[
"Apache-2.0"
] | null | null | null |
# Demo script: emit two fixed lines (one numeric, one Chinese phrase).
for line in ('66666666', '乌七八糟的'):
    print(line)
| 7
| 17
| 0.657143
| 4
| 35
| 5.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.258065
| 0.114286
| 35
| 5
| 18
| 7
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
c379f690e8abc549e8d121a795fd2396bf79ace4
| 73
|
py
|
Python
|
quicklib/utils.py
|
yonatanp/quicklib
|
bd96c6ed5204d0c4e802b320d285d22248beb243
|
[
"MIT"
] | 1
|
2021-12-16T07:27:59.000Z
|
2021-12-16T07:27:59.000Z
|
quicklib/utils.py
|
yonatanp/quicklib
|
bd96c6ed5204d0c4e802b320d285d22248beb243
|
[
"MIT"
] | 6
|
2018-01-15T11:51:37.000Z
|
2019-08-18T13:37:52.000Z
|
quicklib/utils.py
|
yonatanp/quicklib
|
bd96c6ed5204d0c4e802b320d285d22248beb243
|
[
"MIT"
] | null | null | null |
import os
def is_packaging():
    """Return True when no ``PKG-INFO`` file exists in the working directory.

    Presumably ``PKG-INFO`` marks an installed/sdist tree (setuptools writes
    it into source distributions), so its absence indicates we are running
    from a source checkout, i.e. while packaging -- confirm against callers.
    """
    pkg_info_present = os.path.exists("PKG-INFO")
    return not pkg_info_present
| 14.6
| 41
| 0.69863
| 12
| 73
| 4.166667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164384
| 73
| 5
| 41
| 14.6
| 0.819672
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6f0c123371f40a7de5c51c34dc02979c2af7caca
| 122
|
py
|
Python
|
tweetGenerator/__init__.py
|
Aarif123456/tweetGenerator
|
7b3f96e43747530af738abbb9c579996b22839c0
|
[
"MIT"
] | 1
|
2021-03-20T19:24:59.000Z
|
2021-03-20T19:24:59.000Z
|
tweetGenerator/__init__.py
|
Aarif123456/tweetGenerator
|
7b3f96e43747530af738abbb9c579996b22839c0
|
[
"MIT"
] | null | null | null |
tweetGenerator/__init__.py
|
Aarif123456/tweetGenerator
|
7b3f96e43747530af738abbb9c579996b22839c0
|
[
"MIT"
] | 1
|
2021-03-20T19:25:00.000Z
|
2021-03-20T19:25:00.000Z
|
# Package initializer for ``tweetGenerator``: re-export the public names of
# each submodule so callers can write ``from tweetGenerator import <name>``
# without knowing the internal module layout.
# NOTE(review): wildcard imports make the exported API implicit; the actual
# names depend on the submodules, which are not visible here.
from .pySparkManager import *
from .createTargetList import *
from .createCorpus import *
from .generateResponse import *
| 24.4
| 31
| 0.803279
| 12
| 122
| 8.166667
| 0.5
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 122
| 4
| 32
| 30.5
| 0.924528
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6f2e177d626558de29f12add3bd838ae96fc9092
| 19,639
|
py
|
Python
|
python/RLrecon/environments/fixed_environment.py
|
syedsaifhasan/rl_reconstruct
|
1462d3650c3334083a7b4cc34c88e6f5d1095ce3
|
[
"BSD-3-Clause"
] | 3
|
2019-08-19T12:51:41.000Z
|
2021-03-29T11:28:06.000Z
|
python/RLrecon/environments/fixed_environment.py
|
syedsaifhasan/rl_reconstruct
|
1462d3650c3334083a7b4cc34c88e6f5d1095ce3
|
[
"BSD-3-Clause"
] | null | null | null |
python/RLrecon/environments/fixed_environment.py
|
syedsaifhasan/rl_reconstruct
|
1462d3650c3334083a7b4cc34c88e6f5d1095ce3
|
[
"BSD-3-Clause"
] | 2
|
2019-01-14T07:55:40.000Z
|
2021-12-11T13:34:35.000Z
|
from __future__ import print_function
import numpy as np
from environment import BaseEnvironment
from RLrecon import math_utils
class FixedEnvironmentV0(BaseEnvironment):
def __init__(self,
             world_bounding_box,
             random_reset=True,
             radius=7.0,
             height=3.0,
             angle_amount=math_utils.degrees_to_radians(180. / 8.),
             yaw_amount=math_utils.degrees_to_radians(180. / 8.),
             **kwargs):
    """Initialize environment.

    Builds a fixed, discrete action set: index1_range positions evenly spaced
    on a circular orbit (radius/height), each optionally combined with
    index2_range yaw offsets, and hands the resulting action list to the base
    environment.

    Args:
        world_bounding_box (BoundingBox): Overall bounding box of the world to restrict motion.
        engine (BaseEngine): Simulation engine (i.e. Unreal Engine wrapper).
        mapper: Occupancy mapper (i.e. OctomapExt interface).
        clear_size (float): Size of bounding box to clear in the occupancy map on reset.
        random_reset (bool): Use random pose when resetting.
        radius (float): Radius of orbit.
        height (float): Height of orbit.
        yaw_amount (float): Scale of yaw rotations.
        angle_amount (float): Scale of orbital motion.
        use_ros (bool): Whether to use ROS and publish on some topics.
        ros_pose_topic (str): If ROS is used publish agent poses on this topic.
        ros_world_frame (str): If ROS is used this is the id of the world frame.
    """
    self._random_reset = random_reset
    self._radius = radius
    self._height = height
    # NOTE(review): _angle_amount/_yaw_amount are stored but not used in the
    # code visible here -- presumably consumed by the base class or subclasses.
    self._angle_amount = angle_amount
    self._yaw_amount = yaw_amount
    index1_range = 6
    index2_range = 1
    def _action_function(index1, index2, pose):
        # Map the discrete indices to an absolute orbit pose: index1 selects
        # the angle on the circle, index2 an additional yaw offset.
        new_theta = 2 * np.pi * index1 / float(index1_range)
        new_location = self._get_orbit_location(new_theta)
        # Face back toward the orbit center (theta + pi), then apply the
        # index2 yaw offset. NOTE(review): `index2_range / 2` is integer
        # division under Python 2 (this file uses `xrange`), so with
        # index2_range == 1 the offset term is `index2 * pi / 2`.
        new_yaw = new_theta + np.pi
        new_yaw += (index2 - index2_range / 2) * np.pi / 2.
        new_orientation_rpy = np.array([0, 0, new_yaw])
        # print("new_theta:", new_theta)
        # print("new_yaw:", new_yaw)
        new_pose = self.Pose(new_location, new_orientation_rpy)
        # These fixed poses are always reachable, hence unconditionally valid.
        valid = True
        return valid, new_pose
    action_list = []
    update_map_flags = []
    action_rewards = []
    for index1 in xrange(index1_range):
        for index2 in xrange(index2_range):
            # Need to create a new scope here to capture index1, index2 by value
            def create_lambda(idx1, idx2):
                def lambda_fn(pose):
                    return _action_function(idx1, idx2, pose)
                return lambda_fn
            action_list.append(create_lambda(index1, index2))
            update_map_flags.append(True)
            action_rewards.append(-100.0)
    # Observation grid parameters. NOTE(review): _obs_* are set but not read
    # in the visible code -- presumably used by the base class.
    self._obs_level = 2
    self._obs_size_x = 8
    self._obs_size_y = self._obs_size_x
    self._obs_size_z = self._obs_size_x
    super(FixedEnvironmentV0, self).__init__(
        world_bounding_box,
        action_list,
        update_map_flags=update_map_flags,
        action_rewards=action_rewards,
        terminal_score_threshold=0.6,
        **kwargs)
def _get_orbit_angle(self, pose):
theta = np.arctan2(pose.location()[1], pose.location()[0])
return theta
def _get_orbit_location(self, theta):
x = self._radius * np.cos(theta)
y = self._radius * np.sin(theta)
z = self._height
location = np.array([x, y, z])
return location
def get_observation_shapes(self):
return [(self.get_num_of_actions(),)]
def _get_observation(self, pose):
return [np.array(self._action_counter)]
# location = self.get_location()
# # orientation_rpy = self.get_orientation_rpy()
# orientation_quat = self.get_orientation_quat()
# # return [location, orientation_quat, occupancies_3d]
# # return [location, orientation_quat, grid_3d]
# previous_state_orientation_quat = math_utils.convert_rpy_to_quat(self._previous_state.orientation_rpy())
# orientation_quat *= np.sign(orientation_quat[3])
# previous_state_orientation_quat *= np.sign(previous_state_orientation_quat[3])
# return [location, orientation_quat, self._previous_state.location(), previous_state_orientation_quat]
def perform_action(self, action_index, pose=None):
observation, reward, terminal, info = super(FixedEnvironmentV0, self).perform_action(action_index, pose)
self._action_counter[action_index] += 0.1
# self._action_counter[action_index] = np.min([self._action_counter[action_index], 1])
print("self._action_counter:", self._action_counter)
return observation, reward, terminal, info
def reset(self, **kwargs):
"""Resets the environment. Orbit angle is set to zero or randomly initialized."""
# if pose is None:
# pose = self.get_pose()
# theta = self._get_orbit_angle(pose)
# pose = self._get_orbit_pose(theta)
if self._random_reset:
theta = 2 * np.pi * np.random.rand()
if np.random.rand() < 0.25:
yaw = 2 * np.pi * np.random.rand()
else:
d_yaw = np.pi / 4 * (np.random.rand() - 0.5)
yaw = theta + np.pi + d_yaw
else:
theta = 0
yaw = theta + np.pi
location = self._get_orbit_location(theta)
roll = 0
pitch = 0
orientation_rpy = np.array([roll, pitch, yaw])
pose = self.Pose(location, orientation_rpy)
self._action_counter = np.zeros((self.get_num_of_actions()))
if self._random_reset:
action_index = np.random.randint(0, self.get_num_of_actions())
else:
action_index = 0
_, pose = self._action_list[action_index](pose)
# pose = self.simulate_action_on_pose(pose, action_index)
return super(FixedEnvironmentV0, self).reset(pose, **kwargs)
def is_action_allowed_on_pose(self, pose, action_index):
return True
class FixedEnvironmentV1(BaseEnvironment):
    """Orbit environment with discrete poses and a 3D occupancy observation.

    Same discrete orbit actions as FixedEnvironmentV0, but the observation is
    an (x, y, z, 2) grid of occupancy and observation-certainty values queried
    from the occupancy mapper around the world origin.
    """

    def __init__(self,
                 world_bounding_box,
                 random_reset=True,
                 radius=7.0,
                 height=3.0,
                 angle_amount=math_utils.degrees_to_radians(180. / 8.),
                 yaw_amount=math_utils.degrees_to_radians(180. / 8.),
                 **kwargs):
        """Initialize environment.

        Args:
            world_bounding_box (BoundingBox): Overall bounding box of the world to restrict motion.
            random_reset (bool): Use random pose when resetting.
            radius (float): Radius of orbit.
            height (float): Height of orbit.
            angle_amount (float): Scale of orbital motion.
            yaw_amount (float): Scale of yaw rotations.
            **kwargs: Forwarded to BaseEnvironment (engine, mapper, clear_size,
                ROS options, ...).
        """
        self._random_reset = random_reset
        self._radius = radius
        self._height = height
        self._angle_amount = angle_amount
        self._yaw_amount = yaw_amount
        index1_range = 6
        index2_range = 1

        def _action_function(index1, index2, pose):
            """Compute the target pose for discrete action (index1, index2)."""
            new_theta = 2 * np.pi * index1 / float(index1_range)
            new_location = self._get_orbit_location(new_theta)
            # Face the orbit center, offset by a multiple of 90 degrees.
            new_yaw = new_theta + np.pi
            # BUGFIX: floor division so Python 3 keeps the original Python 2
            # integer-division semantics of `index2_range / 2`.
            new_yaw += (index2 - index2_range // 2) * np.pi / 2.
            new_orientation_rpy = np.array([0, 0, new_yaw])
            new_pose = self.Pose(new_location, new_orientation_rpy)
            valid = True
            return valid, new_pose

        action_list = []
        update_map_flags = []
        action_rewards = []
        # BUGFIX: `range` instead of the Python-2-only `xrange`.
        for index1 in range(index1_range):
            for index2 in range(index2_range):
                # Need to create a new scope here to capture index1, index2 by value.
                def create_lambda(idx1, idx2):
                    def lambda_fn(pose):
                        return _action_function(idx1, idx2, pose)
                    return lambda_fn
                action_list.append(create_lambda(index1, index2))
                update_map_flags.append(True)
                action_rewards.append(-100.0)
        self._obs_level = 2
        self._obs_size_x = 8
        self._obs_size_y = self._obs_size_x
        self._obs_size_z = self._obs_size_x
        super(FixedEnvironmentV1, self).__init__(
            world_bounding_box,
            action_list,
            update_map_flags=update_map_flags,
            action_rewards=action_rewards,
            terminal_score_threshold=0.6,
            **kwargs)

    def _get_orbit_angle(self, pose):
        """Return the orbit angle (radians) of the pose's xy location."""
        theta = np.arctan2(pose.location()[1], pose.location()[0])
        return theta

    def _get_orbit_location(self, theta):
        """Return the 3d location on the orbit for angle `theta`."""
        x = self._radius * np.cos(theta)
        y = self._radius * np.sin(theta)
        z = self._height
        location = np.array([x, y, z])
        return location

    def get_observation_shapes(self):
        """Single observation: occupancy + certainty grid, 2 channels."""
        return [
            (self._obs_size_x, self._obs_size_y, self._obs_size_z, 2)
        ]

    def _get_observation(self, pose):
        level = self._obs_level
        size_x = self._obs_size_x
        size_y = self._obs_size_y
        size_z = self._obs_size_z
        # Query a fixed, world-centered subvolume (independent of the pose).
        center = np.array([0, 0, 0])
        orientation_rpy = np.array([0, 0, 0])
        # We query a subvolume of the occupancy map so that the z-axis is
        # aligned with gravity (roll = 0).
        query_orientation_rpy = np.array([0, orientation_rpy[1], orientation_rpy[2]])
        # TODO: Should be exposed in environment
        res = self._mapper.perform_query_subvolume_rpy(
            center, query_orientation_rpy, level, size_x, size_y, size_z)
        occupancies = np.asarray(res.occupancies, dtype=np.float32)
        occupancies_3d = np.reshape(occupancies, (size_x, size_y, size_z))
        observation_certainties = np.asarray(res.observation_certainties, dtype=np.float32)
        observation_certainties_3d = np.reshape(observation_certainties, (size_x, size_y, size_z))
        # Stack occupancy and certainty into the trailing channel dimension.
        grid_3d = np.stack([occupancies_3d, observation_certainties_3d], axis=-1)
        return [np.array(grid_3d)]

    def reset(self, **kwargs):
        """Resets the environment. Orbit angle is set to zero or randomly initialized."""
        if self._random_reset:
            theta = 2 * np.pi * np.random.rand()
            if np.random.rand() < 0.25:
                # Occasionally use a fully random yaw ...
                yaw = 2 * np.pi * np.random.rand()
            else:
                # ... otherwise look roughly at the orbit center with jitter.
                d_yaw = np.pi / 4 * (np.random.rand() - 0.5)
                yaw = theta + np.pi + d_yaw
        else:
            theta = 0
            yaw = theta + np.pi
        location = self._get_orbit_location(theta)
        roll = 0
        pitch = 0
        orientation_rpy = np.array([roll, pitch, yaw])
        pose = self.Pose(location, orientation_rpy)
        if self._random_reset:
            action_index = np.random.randint(0, self.get_num_of_actions())
        else:
            action_index = 0
        # Snap the initial pose onto one of the discrete action poses.
        _, pose = self._action_list[action_index](pose)
        print("action_index:", action_index)
        print("pose:", pose)
        return super(FixedEnvironmentV1, self).reset(pose, **kwargs)

    def is_action_allowed_on_pose(self, pose, action_index):
        """All teleport actions are always allowed."""
        return True
class FixedEnvironmentV2(BaseEnvironment):
    """Orbit environment with 6 positions x 3 yaw offsets per position.

    Like FixedEnvironmentV1, but with 3 yaw variants per orbit position
    (index2_range = 3) and a map query centered at z = 2. The action target
    pose is independent of the current pose.
    """

    def __init__(self,
                 world_bounding_box,
                 random_reset=True,
                 radius=7.0,
                 height=3.0,
                 **kwargs):
        """Initialize environment.

        Args:
            world_bounding_box (BoundingBox): Overall bounding box of the world to restrict motion.
            random_reset (bool): Use random pose when resetting.
            radius (float): Radius of orbit.
            height (float): Height of orbit.
            **kwargs: Forwarded to BaseEnvironment (engine, mapper, clear_size,
                ROS options, ...).
        """
        self._random_reset = random_reset
        self._radius = radius
        self._height = height
        index1_range = 6
        index2_range = 3

        def _action_function(index1, index2):
            """Compute the target pose for discrete action (index1, index2)."""
            new_theta = 2 * np.pi * index1 / float(index1_range)
            new_location = self._get_orbit_location(new_theta)
            # Face the orbit center, offset by a multiple of 90 degrees.
            new_yaw = new_theta + np.pi
            # BUGFIX: floor division so Python 3 keeps the original Python 2
            # integer-division semantics of `index2_range / 2`.
            new_yaw += (index2 - index2_range // 2) * np.pi / 2.
            new_orientation_rpy = np.array([0, 0, new_yaw])
            new_pose = self.Pose(new_location, new_orientation_rpy)
            valid = True
            return valid, new_pose

        action_list = []
        update_map_flags = []
        action_rewards = []
        # BUGFIX: `range` instead of the Python-2-only `xrange`.
        for index1 in range(index1_range):
            for index2 in range(index2_range):
                # Need to create a new scope here to capture index1, index2 by value.
                # The pose argument is intentionally ignored: target poses are fixed.
                def create_lambda(idx1, idx2):
                    def lambda_fn(pose):
                        return _action_function(idx1, idx2)
                    return lambda_fn
                action_list.append(create_lambda(index1, index2))
                update_map_flags.append(True)
                action_rewards.append(-100.0)
        self._obs_level = 2
        self._obs_size_x = 8
        self._obs_size_y = self._obs_size_x
        self._obs_size_z = self._obs_size_x
        super(FixedEnvironmentV2, self).__init__(
            world_bounding_box,
            action_list,
            update_map_flags=update_map_flags,
            action_rewards=action_rewards,
            terminal_score_threshold=0.6,
            **kwargs)

    def _get_orbit_angle(self, pose):
        """Return the orbit angle (radians) of the pose's xy location."""
        theta = np.arctan2(pose.location()[1], pose.location()[0])
        return theta

    def _get_orbit_location(self, theta):
        """Return the 3d location on the orbit for angle `theta`."""
        x = self._radius * np.cos(theta)
        y = self._radius * np.sin(theta)
        z = self._height
        location = np.array([x, y, z])
        return location

    def get_observation_shapes(self):
        """Single observation: occupancy + certainty grid, 2 channels."""
        return [
            (self._obs_size_x, self._obs_size_y, self._obs_size_z, 2)
        ]

    def _get_observation(self, pose):
        level = self._obs_level
        size_x = self._obs_size_x
        size_y = self._obs_size_y
        size_z = self._obs_size_z
        # Query a fixed subvolume centered slightly above the world origin.
        center = np.array([0, 0, 2])
        orientation_rpy = np.array([0, 0, 0])
        # We query a subvolume of the occupancy map so that the z-axis is
        # aligned with gravity (roll = 0).
        query_orientation_rpy = np.array([0, orientation_rpy[1], orientation_rpy[2]])
        # TODO: Should be exposed in environment
        res = self._mapper.perform_query_subvolume_rpy(
            center, query_orientation_rpy, level, size_x, size_y, size_z)
        occupancies = np.asarray(res.occupancies, dtype=np.float32)
        occupancies_3d = np.reshape(occupancies, (size_x, size_y, size_z))
        observation_certainties = np.asarray(res.observation_certainties, dtype=np.float32)
        # NOTE(review): debug guard left in place — this method currently
        # always raises AssertionError before the normalization below runs.
        # Confirm whether V2 observations are intentionally disabled.
        assert False
        observation_certainties /= (10.0 * 2 ** self._obs_level)
        observation_certainties = np.minimum(observation_certainties, 10.0)
        observation_certainties_3d = np.reshape(observation_certainties, (size_x, size_y, size_z))
        grid_3d = np.stack([occupancies_3d, observation_certainties_3d], axis=-1)
        return [grid_3d]

    def reset(self, **kwargs):
        """Resets the environment. Orbit angle is set to zero or randomly initialized."""
        if self._random_reset:
            theta = 2 * np.pi * np.random.rand()
            if np.random.rand() < 0.25:
                # Occasionally use a fully random yaw ...
                yaw = 2 * np.pi * np.random.rand()
            else:
                # ... otherwise look roughly at the orbit center with jitter.
                d_yaw = np.pi / 4 * (np.random.rand() - 0.5)
                yaw = theta + np.pi + d_yaw
        else:
            theta = 0
            yaw = theta + np.pi
        location = self._get_orbit_location(theta)
        roll = 0
        pitch = 0
        orientation_rpy = np.array([roll, pitch, yaw])
        pose = self.Pose(location, orientation_rpy)
        if self._random_reset:
            action_index = np.random.randint(0, self.get_num_of_actions())
        else:
            action_index = 0
        # Snap the initial pose onto one of the discrete action poses.
        _, pose = self._action_list[action_index](pose)
        print("action_index:", action_index)
        print("pose:", pose)
        return super(FixedEnvironmentV2, self).reset(pose, **kwargs)

    def is_action_allowed_on_pose(self, pose, action_index):
        """All teleport actions are always allowed."""
        return True
| 44.132584
| 114
| 0.612149
| 2,450
| 19,639
| 4.605714
| 0.082041
| 0.048387
| 0.02632
| 0.013825
| 0.942042
| 0.924672
| 0.924672
| 0.924672
| 0.924672
| 0.924672
| 0
| 0.019004
| 0.295331
| 19,639
| 444
| 115
| 44.231982
| 0.796373
| 0.294822
| 0
| 0.89
| 0
| 0
| 0.004251
| 0.001566
| 0
| 0
| 0
| 0.004505
| 0.003333
| 1
| 0.103333
| false
| 0
| 0.013333
| 0.033333
| 0.22
| 0.02
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f3b832f4d3c13fa18558c7647652659a4d1acab
| 54,929
|
py
|
Python
|
python/models/logic_DIIN.py
|
Vincent717/Densely-Interactive-Inference-Network
|
e3038a41eddd06f3ea76a794ed272f235136b335
|
[
"Apache-2.0"
] | null | null | null |
python/models/logic_DIIN.py
|
Vincent717/Densely-Interactive-Inference-Network
|
e3038a41eddd06f3ea76a794ed272f235136b335
|
[
"Apache-2.0"
] | null | null | null |
python/models/logic_DIIN.py
|
Vincent717/Densely-Interactive-Inference-Network
|
e3038a41eddd06f3ea76a794ed272f235136b335
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
from util import blocks
from my.tensorflow.nn import softsel, get_logits, highway_network, multi_conv1d, linear, conv2d, cosine_similarity, variable_summaries, dense_logits, fuse_gate
from my.tensorflow import flatten, reconstruct, add_wd, exp_mask
import numpy as np
class MyModel(object):
    """DIIN-style NLI classifier graph (TensorFlow 1.x).

    Builds the full computation graph in __init__: placeholders, word/char
    embeddings, highway layers, self-attention (plus optional dependency)
    encoding, interaction + DenseNet feature extraction, the 3-way logits,
    and the training objective with optional logic-rule distillation
    (Hu et al. 2016 style), L2, semantic-loss, and weight-diff penalties.
    """

    def __init__(self, config, seq_length, emb_dim, hidden_dim, emb_train, embeddings = None, pred_size = 3, context_seq_len = None, query_seq_len = None):
        ## Define hyperparameters
        # tf.reset_default_graph()
        self.embedding_dim = emb_dim
        self.dim = hidden_dim
        self.sequence_length = seq_length
        self.pred_size = pred_size
        self.context_seq_len = context_seq_len
        self.query_seq_len = query_seq_len
        # self.config = config

        ## Define the placeholders
        self.premise_x = tf.placeholder(tf.int32, [None, self.sequence_length], name='premise')
        self.hypothesis_x = tf.placeholder(tf.int32, [None, self.sequence_length], name='hypothesis')
        # 47-dimensional POS-tag feature vector per token.
        self.premise_pos = tf.placeholder(tf.int32, [None, self.sequence_length, 47], name='premise_pos')
        self.hypothesis_pos = tf.placeholder(tf.int32, [None, self.sequence_length, 47], name='hypothesis_pos')
        self.premise_char = tf.placeholder(tf.int32, [None, self.sequence_length, config.char_in_word_size], name='premise_char')
        self.hypothesis_char = tf.placeholder(tf.int32, [None, self.sequence_length, config.char_in_word_size], name='hypothesis_char')
        self.premise_exact_match = tf.placeholder(tf.int32, [None, self.sequence_length,1], name='premise_exact_match')
        self.hypothesis_exact_match = tf.placeholder(tf.int32, [None, self.sequence_length,1], name='hypothesis_exact_match')
        self.premise_dependency = tf.placeholder(tf.int32, [None, self.sequence_length, config.depend_size], name='premise_dependency')
        self.hypothesis_dependency = tf.placeholder(tf.int32, [None, self.sequence_length, config.depend_size], name='hypothesis_dependency')
        # Per-example split index for "and"-conjoined premises; -1 means
        # "no split" (masked out in slice_it_on / cal_and_distr below).
        self.and_index = tf.placeholder(tf.int32, [None,], name='and_index')
        #self.epoch = tf.placeholder(tf.int32, [1], name='epoch')
        self.global_step = tf.Variable(0, name='global_step', trainable=False)
        # Dropout keep rate decays exponentially with the global step; the
        # decayed tensor replaces config.keep_rate for all layers below.
        self.dropout_keep_rate = tf.train.exponential_decay(config.keep_rate, self.global_step, config.dropout_decay_step, config.dropout_decay_rate, staircase=False, name='dropout_keep_rate')
        config.keep_rate = self.dropout_keep_rate
        tf.summary.scalar('dropout_keep_rate', self.dropout_keep_rate)
        self.y = tf.placeholder(tf.int32, [None], name='label_y')
        self.keep_rate_ph = tf.placeholder(tf.float32, [], name='keep_prob')
        self.is_train = tf.placeholder('bool', [], name='is_train')

        ## Function for embedding lookup and dropout at embedding layer
        def emb_drop(E, x):
            # Dropout is applied only when is_train is True.
            emb = tf.nn.embedding_lookup(E, x)
            emb_drop = tf.cond(self.is_train, lambda: tf.nn.dropout(emb, config.keep_rate), lambda: emb)
            return emb_drop

        # Get lengths of unpadded sentences
        prem_seq_lengths, prem_mask = blocks.length(self.premise_x)  # mask [N, L , 1]
        hyp_seq_lengths, hyp_mask = blocks.length(self.hypothesis_x)
        self.prem_mask = prem_mask
        self.hyp_mask = hyp_mask

        ### Embedding layer ###
        with tf.variable_scope("emb"):
            with tf.variable_scope("emb_var"), tf.device("/cpu:0"):
                self.E = tf.Variable(embeddings, trainable=emb_train)
                premise_in = emb_drop(self.E, self.premise_x)  #P
                hypothesis_in = emb_drop(self.E, self.hypothesis_x)  #H

        with tf.variable_scope("char_emb"):
            char_emb_mat = tf.get_variable("char_emb_mat", shape=[config.char_vocab_size, config.char_emb_size])
            with tf.variable_scope("char") as scope:
                char_pre = tf.nn.embedding_lookup(char_emb_mat, self.premise_char)
                char_hyp = tf.nn.embedding_lookup(char_emb_mat, self.hypothesis_char)
                filter_sizes = list(map(int, config.out_channel_dims.split(',')))  #[100]
                heights = list(map(int, config.filter_heights.split(',')))  #[5]
                assert sum(filter_sizes) == config.char_out_size, (filter_sizes, config.char_out_size)
                with tf.variable_scope("conv") as scope:
                    conv_pre = multi_conv1d(char_pre, filter_sizes, heights, "VALID", self.is_train, config.keep_rate, scope='conv')
                    # Share the char-CNN weights between premise and hypothesis.
                    scope.reuse_variables()
                    conv_hyp = multi_conv1d(char_hyp, filter_sizes, heights, "VALID", self.is_train, config.keep_rate, scope='conv')
                    conv_pre = tf.reshape(conv_pre, [-1, self.sequence_length, config.char_out_size])
                    conv_hyp = tf.reshape(conv_hyp, [-1, self.sequence_length, config.char_out_size])
            premise_in = tf.concat([premise_in, conv_pre], axis=2)
            hypothesis_in = tf.concat([hypothesis_in, conv_hyp], axis=2)

        # syntactic information: POS tags and exact-match flags appended
        # to the token representations along the feature axis.
        premise_in = tf.concat((premise_in, tf.cast(self.premise_pos, tf.float32)), axis=2)
        hypothesis_in = tf.concat((hypothesis_in, tf.cast(self.hypothesis_pos, tf.float32)), axis=2)
        premise_in = tf.concat([premise_in, tf.cast(self.premise_exact_match, tf.float32)], axis=2)
        hypothesis_in = tf.concat([hypothesis_in, tf.cast(self.hypothesis_exact_match, tf.float32)], axis=2)

        with tf.variable_scope("highway") as scope:
            # Highway weights are shared between the two sides via reuse.
            premise_in = highway_network(premise_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)
            scope.reuse_variables()
            hypothesis_in = highway_network(hypothesis_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)

        ## self attention process
        def model_self_attention(config, premise_in, hypothesis_in, prem_mask, hyp_mask):
            # Stacked self-attention encoders; premise and hypothesis use
            # separate scopes ("..._enc" vs "..._enc_h").
            pre = premise_in
            hyp = hypothesis_in
            for i in range(config.self_att_enc_layers):
                with tf.variable_scope(tf.get_variable_scope(), reuse=False):
                    p = self_attention_layer(config, self.is_train, pre, p_mask=prem_mask, scope="{}_layer_self_att_enc".format(i))  # [N, len, dim]
                    h = self_attention_layer(config, self.is_train, hyp, p_mask=hyp_mask, scope="{}_layer_self_att_enc_h".format(i))
                    pre = p
                    hyp = h
                    variable_summaries(p, "p_self_enc_summary_layer_{}".format(i))
                    variable_summaries(h, "h_self_enc_summary_layer_{}".format(i))
            if config.use_depend:
                # Optional dependency-based encoding branch, concatenated to
                # the self-attention output along the feature axis.
                pre1 = p
                hyp1 = h
                for i in range(config.denp_enc_layers):
                    with tf.variable_scope(tf.get_variable_scope(), reuse=False):
                        if config.use_depend:
                            p1 = dependency_layer(config, self.is_train, pre1, self.premise_dependency, p_mask=prem_mask, scope="{}_layer_dependency_enc".format(i))
                            h1 = dependency_layer(config, self.is_train, hyp1, self.hypothesis_dependency, p_mask=hyp_mask, scope="{}_layer_dependency_enc_h".format(i))
                        pre1 = p1
                        hyp1 = h1
                        variable_summaries(p, "p_denp_enc_summary_layer_{}".format(i))
                        variable_summaries(h, "h_denp_enc_summary_layer_{}".format(i))
                p = tf.concat([p, p1], -1)
                h = tf.concat([h, h1], -1)
            return p, h

        ## main process : interaction + dense net
        def model_one_side(config, main, support, main_length, support_length, main_mask, support_mask, scope):
            # Interaction tensor between main and support, then DenseNet
            # feature extraction down to a flat feature vector.
            bi_att_mx = bi_attention_mx(config, self.is_train, main, support, p_mask=main_mask, h_mask=support_mask, sequence_length=self.sequence_length)  # [N, PL, HL]
            bi_att_mx = tf.cond(self.is_train, lambda: tf.nn.dropout(bi_att_mx, config.keep_rate), lambda: bi_att_mx)
            out_final = dense_net(config, bi_att_mx, self.is_train)
            return out_final

        # self attention
        with tf.variable_scope("prepro") as scope:
            p, h = model_self_attention(config, premise_in, hypothesis_in, prem_mask, hyp_mask)
        # main
        with tf.variable_scope("main") as scope:
            premise_final = model_one_side(config, p, h, prem_seq_lengths, hyp_seq_lengths, prem_mask, hyp_mask, scope="premise_as_main")
            f0 = premise_final
            self.logits = linear(f0, self.pred_size ,True, bias_start=0.0, scope="logit", squeeze=False, wd=config.wd, input_keep_prob=config.keep_rate,
                                 is_train=self.is_train)
        tf.summary.histogram('logit_histogram', self.logits)

        ## Hu 2016
        if config.use_logic:
            # Logic-rule distillation: build a "teacher" distribution q(y|x)
            # by re-scoring the two halves of an "and"-conjoined premise and
            # applying soft logic-rule constraints (Hu et al. 2016).
            minus_one = tf.Variable(-1)

            def go_through_whole_model(premise_in, hypothesis_in, config=config, prem_mask=prem_mask, hyp_mask=hyp_mask, pred_size=self.pred_size, is_train=self.is_train):
                # Re-run the "main" interaction + classifier with shared
                # (reuse=True) variables on a sliced premise/hypothesis pair;
                # returns softmax probabilities.
                # with tf.variable_scope("highway") as scope:
                #     premise_in = highway_network(premise_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)
                #     scope.reuse_variables()
                #     hypothesis_in = highway_network(hypothesis_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)
                # self attention
                #with tf.variable_scope("prepro") as scope:
                #    p, h = model_self_attention(config, premise_in, hypothesis_in, prem_mask, hyp_mask)
                p, h = premise_in, hypothesis_in
                # main
                with tf.variable_scope("main", reuse=True) as scope:
                    premise_final = model_one_side(config, p, h, prem_seq_lengths, hyp_seq_lengths, prem_mask, hyp_mask, scope="premise_as_main")
                    f0 = premise_final
                    logits = linear(f0, pred_size ,True, bias_start=0.0, scope="logit", squeeze=False, wd=config.wd, input_keep_prob=config.keep_rate,
                                    is_train=is_train)
                logits = tf.nn.softmax(logits)
                return logits

            def cal_and_distr(sub_logits1, sub_logits2, c, and_mask, lambdal=1.):
                """
                there are two rules:
                AE: 1(y=Entailment) -> (p1_E V p2_E) ^ (p1_E V p2_E) -> 1(y=E)
                AC: 1(y=Contradiction) -> (p1_C V p2_C) ^ (p1_C V p2_C) -> 1(y=C)
                """
                # Combine the two sub-premise probability vectors and compute
                # per-class rule penalties; rows with and_mask == -1 (no "and"
                # split) are zeroed out. (Shape comments assume batch size 70.)
                # for rule in rules:
                #     if rule == 'AndE':
                p1_add_p2 = sub_logits1 + sub_logits2
                pre_distr = tf.minimum(p1_add_p2, 1)  # 70x3
                r_AE_y0 = (pre_distr[:,0] + 1) / 2  # 70x1
                r_AC_y0 = (2 - pre_distr[:,2]) / 2  # 70x1
                r_AN_y0 = 1
                r_AE_y1 = (2 - pre_distr[:,0]) / 2
                r_AC_y1 = (pre_distr[:,2] + 1) / 2
                r_AN_y1 = p1_add_p2[:,1] / 2
                r_AE_y2 = r_AE_y1
                r_AC_y2 = r_AC_y0
                r_AN_y2 = 1
                r_y0 = c*lambdal* ( 3. - r_AE_y0 - r_AC_y0 - r_AN_y0)  # 70x1
                r_y1 = c*lambdal* ( 3. - r_AE_y1 - r_AC_y1 - r_AN_y1)  # 70x1
                r_y2 = c*lambdal* ( 3. - r_AE_y2 - r_AC_y2 - r_AN_y2)  # 70x1
                r_y0 = tf.where(tf.equal(and_mask, -1), tf.zeros_like(r_y0), r_y0)  # mask
                r_y1 = tf.where(tf.equal(and_mask, -1), tf.zeros_like(r_y1), r_y1)
                r_y2 = tf.where(tf.equal(and_mask, -1), tf.zeros_like(r_y2), r_y2)
                r_y0 = tf.reshape(r_y0, [-1, 1])
                r_y1 = tf.reshape(r_y1, [-1, 1])
                r_y2 = tf.reshape(r_y2, [-1, 1])
                result = - tf.concat([r_y0, r_y1, r_y2], axis=1)
                # tuncate
                #distr_y0 = distr_all[:,0]
                #distr_y0 = distr_y0.reshape([distr_y0.shape[0], 1])
                #distr_y0_copies = tf.tile(distr_y0, [1, result.shape[1]])
                #result -= distr_y0_copies
                # Clamp to avoid overflow in the tf.exp applied by the caller.
                result = tf.maximum(tf.minimum(result, 60.), -60.)
                return result

            def slice_full(index, p):
                # Split p at row `index` and zero-pad each half back to the
                # full length, so both halves keep the original shape.
                p1_ = tf.slice(p, [0,0], [index, -1])
                p2_ = tf.slice(p, [index,0], [-1, -1])
                p1_full = tf.concat([p1_, tf.zeros_like(p2_)], axis=0)
                p2_full = tf.concat([p2_, tf.zeros_like(p1_)], axis=0)
                #p1_full = tf.reshape(p1_full, [-1, self.sequence_length, p1_full.shape[-1]])
                #p2_full = tf.reshape(p2_full, [-1, self.sequence_length, p2_full.shape[-1]])
                return p1_full, p2_full

            def two_zero(p):
                # Placeholder halves for examples without an "and" split.
                return tf.zeros_like(p), tf.zeros_like(p)

            def slice_it_on(elems):
                """
                index : ? x 1
                p : ? x 48 x 448
                h : ? x 48 x 448
                since it is map_fn, so ? will be ignored
                """
                index, p, h = elems
                #index = index[0]
                # index == -1 marks "no and-split": emit all-zero tensors.
                p1, p2 = tf.cond(tf.equal(index, minus_one), lambda: two_zero(p), lambda: slice_full(index, p))
                sub_h = tf.cond(tf.equal(index, minus_one), lambda: tf.zeros_like(h), lambda: h)
                return p1, p2, sub_h

            # construct teacher network output
            q_y_x = self.logits
            p1, p2, sub_h = tf.map_fn(slice_it_on, (self.and_index, p, h), dtype=(tf.float32, tf.float32, tf.float32))
            sub_logits1 = go_through_whole_model(p1, sub_h)
            sub_logits2 = go_through_whole_model(p2, sub_h)
            c = tf.constant(config.C , dtype=tf.float32, shape=[], name='c')
            lambdal = tf.constant(config.lambdal , dtype=tf.float32, shape=[], name='lambdal')
            distr = tf.exp(cal_and_distr(sub_logits1, sub_logits2, c, self.and_index, lambdal))
            q_y_x = q_y_x * distr
            self.q_y_x = q_y_x

        # Define the cost function
        if not config.use_logic:
            # Plain cross-entropy objective.
            # NOTE(review): tf.arg_max is deprecated in favor of tf.argmax.
            self.total_cost = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.y, logits=self.logits))
            self.acc = tf.reduce_mean(tf.cast(tf.equal(tf.arg_max(self.logits, dimension=1),tf.cast(self.y,tf.int64)), tf.float32))
            tf.summary.scalar('acc', self.acc)
            tf.summary.scalar('loss', self.total_cost)
            #self.auc_ROC = tf.metrics.auc(tf.cast(self.y,tf.int64), tf.arg_max(self.logits, dimension=1), curve = 'ROC')
            #self.auc_PR = tf.metrics.auc(tf.cast(self.y,tf.int64), tf.arg_max(self.logits, dimension=1), curve = 'PR')
            #tf.summary.scalar('auc_ROC', self.auc_ROC)
            #tf.summary.scalar('auc_PR', self.auc_PR)
            # calculate acc
        else:
            # Distillation objective: mix ground-truth cross-entropy with
            # cross-entropy against the teacher's argmax; the teacher weight
            # pi decays with the global step.
            get_pi = lambda x, y: x * 0.9**tf.cast(y/6750, tf.float32)
            pi = get_pi(config.pi, self.global_step)
            self.total_cost = (1-pi)*tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.y, logits=self.logits))
            self.total_cost += pi*tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=tf.arg_max(q_y_x, dimension=1), logits=self.logits))
            self.acc = tf.reduce_mean(tf.cast(tf.equal(tf.arg_max(self.logits, dimension=1),tf.cast(self.y,tf.int64)), tf.float32))
            tf.summary.scalar('acc', self.acc)
            tf.summary.scalar('loss', self.total_cost)

        # L2 Loss
        if config.l2_loss:
            if config.sigmoid_growing_l2loss:
                # L2 penalty whose weight grows along a sigmoid schedule,
                # reaching full strength at weight_l2loss_step_full_reg steps.
                weights_added = tf.add_n([tf.nn.l2_loss(tensor) for tensor in tf.trainable_variables() if tensor.name.endswith("weights:0") and not tensor.name.endswith("weighted_sum/weights:0") or tensor.name.endswith('kernel:0')])
                full_l2_step = tf.constant(config.weight_l2loss_step_full_reg , dtype=tf.int32, shape=[], name='full_l2reg_step')
                full_l2_ratio = tf.constant(config.l2_regularization_ratio , dtype=tf.float32, shape=[], name='l2_regularization_ratio')
                gs_flt = tf.cast(self.global_step , tf.float32)
                half_l2_step_flt = tf.cast(full_l2_step / 2 ,tf.float32)
                # (self.global_step - full_l2_step / 2)
                # tf.cast((self.global_step - full_l2_step / 2) * 8, tf.float32) / tf.cast(full_l2_step / 2 ,tf.float32)
                # l2loss_ratio = tf.sigmoid( tf.cast((self.global_step - full_l2_step / 2) * 8, tf.float32) / tf.cast(full_l2_step / 2 ,tf.float32)) * full_l2_ratio
                l2loss_ratio = tf.sigmoid( ((gs_flt - half_l2_step_flt) * 8) / half_l2_step_flt) * full_l2_ratio
                tf.summary.scalar('l2loss_ratio', l2loss_ratio)
                l2loss = weights_added * l2loss_ratio
            else:
                # Constant-ratio L2 penalty over all weight/kernel variables.
                l2loss = tf.add_n([tf.nn.l2_loss(tensor) for tensor in tf.trainable_variables() if tensor.name.endswith("weights:0") or tensor.name.endswith('kernel:0')]) * tf.constant(config.l2_regularization_ratio , dtype='float', shape=[], name='l2_regularization_ratio')
            tf.summary.scalar('l2loss', l2loss)
            self.total_cost += l2loss

        # semantic Loss
        if config.semantic_loss:
            #semantic_loss = tf.add_n([tf.nn.l2_loss(tensor) for tensor in tf.trainable_variables()
            #                          if tensor.name.endswith("weights:0") or tensor.name.endswith('kernel:0')])
            def cal_exactly_one_loss(logits):
                # Negative log-probability that exactly one of the three
                # class indicators is "on" (exactly-one semantic constraint).
                #semantic_loss = tf.Variable(tf.zeros([], dtype=np.float32), name='semantic_loss_term')
                return tf.reduce_sum(-tf.log(logits[:,0]*(1-logits[:,1])*(1-logits[:,2]) +
                                             logits[:,1]*(1-logits[:,0])*(1-logits[:,2]) +
                                             logits[:,2]*(1-logits[:,0])*(1-logits[:,1])
                                             ))
            def cal_logic_rules_loss(rules, logits):
                # Loss for an explicit set of rule outputs ('0'/'1'/'2').
                def cal_logic_rule(ro, ls):
                    if ro == '0':
                        return ls[0]*(1-ls[1])*(1-ls[2])
                    elif ro == '1':
                        return ls[1]*(1-ls[0])*(1-ls[2])
                    elif ro == '2':
                        return ls[2]*(1-ls[0])*(1-ls[1])
                return -tf.log(tf.add_n([cal_logic_rule(rule_output, logits) for rule_output in rules]))
            #semantic_loss = cal_logic_rules_loss(self.rules_output, self.logits)
            # NOTE(review): semantic_loss is only assigned when
            # config.use_exactly_one is set — the lines below raise a
            # NameError otherwise. Confirm the intended config invariant.
            if config.use_exactly_one:
                semantic_loss = cal_exactly_one_loss(self.logits)
            semantic_loss = tf.reduce_mean(semantic_loss)
            semantic_loss = semantic_loss * tf.constant(config.semantic_regularization_ratio , dtype='float', shape=[], name='semantic_regularization_ratio')
            tf.summary.scalar('semantic loss', semantic_loss)
            self.total_cost += semantic_loss

        if config.wo_enc_sharing or config.wo_highway_sharing_but_penalize_diff:
            # Penalize the difference between the premise-side and
            # hypothesis-side encoder weights (soft weight sharing): collect
            # matching kernel pairs by name and L2-penalize their diffs.
            diffs = []
            for i in range(config.self_att_enc_layers):
                for tensor in tf.trainable_variables():
                    print(tensor.name)
                    if tensor.name == "prepro/{}_layer_self_att_enc/self_attention/h_logits/first/kernel:0".format(i):
                        l_lg = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_attention/h_logits/first/kernel:0".format(i):
                        r_lg = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/lhs_1/kernel:0".format(i):
                        l_fg_lhs_1 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/lhs_1/kernel:0".format(i):
                        r_fg_lhs_1= tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/rhs_1/kernel:0".format(i):
                        l_fg_rhs_1= tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/rhs_1/kernel:0".format(i):
                        r_fg_rhs_1= tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/lhs_2/kernel:0".format(i):
                        l_fg_lhs_2= tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/lhs_2/kernel:0".format(i):
                        r_fg_lhs_2= tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/rhs_2/kernel:0".format(i):
                        l_fg_rhs_2= tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/rhs_2/kernel:0".format(i):
                        r_fg_rhs_2= tensor
                    if config.two_gate_fuse_gate:
                        if tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/lhs_3/kernel:0".format(i):
                            l_fg_lhs_3 = tensor
                        elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/lhs_3/kernel:0".format(i):
                            r_fg_lhs_3 = tensor
                        elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/rhs_3/kernel:0".format(i):
                            l_fg_rhs_3 = tensor
                        elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/rhs_3/kernel:0".format(i):
                            r_fg_rhs_3 = tensor
                diffs += [l_lg - r_lg, l_fg_lhs_1 - r_fg_lhs_1, l_fg_rhs_1 - r_fg_rhs_1, l_fg_lhs_2 - r_fg_lhs_2, l_fg_rhs_2 - r_fg_rhs_2]
                if config.two_gate_fuse_gate:
                    diffs += [l_fg_lhs_3 - r_fg_lhs_3, l_fg_rhs_3 - r_fg_rhs_3]
            diff_loss = tf.add_n([tf.nn.l2_loss(tensor) for tensor in diffs]) * tf.constant(config.diff_penalty_loss_ratio , dtype='float', shape=[], name='diff_penalty_loss_ratio')
            tf.summary.scalar('diff_penalty_loss', diff_loss)
            self.total_cost += diff_loss

        self.summary = tf.summary.merge_all()

        # Count and print the number of trainable weight/bias parameters.
        total_parameters = 0
        for v in tf.global_variables():
            if not v.name.endswith("weights:0") and not v.name.endswith("biases:0") and not v.name.endswith('kernel:0') and not v.name.endswith('bias:0'):
                continue
            print(v.name)
            # print(type(v.name))
            shape = v.get_shape().as_list()
            param_num = 1
            for dim in shape:
                param_num *= dim
            print(param_num)
            total_parameters += param_num
        print(total_parameters)
class MyModelWn(object):
    """Interaction-based NLI model (DIIN-style) with optional dependency
    encoding, WordNet features, logic-rule distillation (Hu et al. 2016 style
    teacher), semantic loss, and an encoder-diff penalty.

    Builds the whole TF1 graph in ``__init__``: placeholders, embedding /
    char-conv / highway input encoding, self-attention encoder, dense-net
    interaction feature extractor, logits, and the training losses.
    """
    def __init__(self, config, seq_length, emb_dim, hidden_dim, emb_train, embeddings = None, pred_size = 3, context_seq_len = None, query_seq_len = None):
        ## Define hyperparameters
        # tf.reset_default_graph()
        self.embedding_dim = emb_dim
        self.dim = hidden_dim
        self.sequence_length = seq_length
        self.pred_size = pred_size          # number of output classes (3 = E/N/C)
        self.context_seq_len = context_seq_len
        self.query_seq_len = query_seq_len
        # self.config = config
        ## Define the placeholders
        self.premise_x = tf.placeholder(tf.int32, [None, self.sequence_length], name='premise')
        self.hypothesis_x = tf.placeholder(tf.int32, [None, self.sequence_length], name='hypothesis')
        # POS features: 47-dim per token (presumably one-hot POS tags -- TODO confirm)
        self.premise_pos = tf.placeholder(tf.int32, [None, self.sequence_length, 47], name='premise_pos')
        self.hypothesis_pos = tf.placeholder(tf.int32, [None, self.sequence_length, 47], name='hypothesis_pos')
        self.premise_char = tf.placeholder(tf.int32, [None, self.sequence_length, config.char_in_word_size], name='premise_char')
        self.hypothesis_char = tf.placeholder(tf.int32, [None, self.sequence_length, config.char_in_word_size], name='hypothesis_char')
        self.premise_exact_match = tf.placeholder(tf.int32, [None, self.sequence_length,1], name='premise_exact_match')
        self.hypothesis_exact_match = tf.placeholder(tf.int32, [None, self.sequence_length,1], name='hypothesis_exact_match')
        self.wordnet_rel = tf.placeholder(tf.float32, [None, self.sequence_length, self.sequence_length, 5], name='wordnet_rel')
        self.premise_dependency = tf.placeholder(tf.int32, [None, self.sequence_length, config.depend_size], name='premise_dependency')
        self.hypothesis_dependency = tf.placeholder(tf.int32, [None, self.sequence_length, config.depend_size], name='hypothesis_dependency')
        # Per-example split point for the "A and B" logic rules; -1 disables the rule.
        self.and_index = tf.placeholder(tf.int32, [None,], name='and_index')
        #self.epoch = tf.placeholder(tf,int32, [1], name='epoch')
        self.global_step = tf.Variable(0, name='global_step', trainable=False)
        self.dropout_keep_rate = tf.train.exponential_decay(config.keep_rate, self.global_step, config.dropout_decay_step, config.dropout_decay_rate, staircase=False, name='dropout_keep_rate')
        # NOTE(review): this overwrites the scalar config.keep_rate with a tensor;
        # every later use of config.keep_rate sees the decayed-rate tensor.
        config.keep_rate = self.dropout_keep_rate
        tf.summary.scalar('dropout_keep_rate', self.dropout_keep_rate)
        self.y = tf.placeholder(tf.int32, [None], name='label_y')
        self.keep_rate_ph = tf.placeholder(tf.float32, [], name='keep_prob')
        self.is_train = tf.placeholder('bool', [], name='is_train')

        ## Function for embedding lookup and dropout at embedding layer
        def emb_drop(E, x):
            emb = tf.nn.embedding_lookup(E, x)
            # Dropout is applied only on the training path of the cond.
            emb_drop = tf.cond(self.is_train, lambda: tf.nn.dropout(emb, config.keep_rate), lambda: emb)
            return emb_drop

        # Get lengths of unpadded sentences
        prem_seq_lengths, prem_mask = blocks.length(self.premise_x)  # mask [N, L, 1]
        hyp_seq_lengths, hyp_mask = blocks.length(self.hypothesis_x)
        self.prem_mask = prem_mask
        self.hyp_mask = hyp_mask

        ### Embedding layer ###
        with tf.variable_scope("emb"):
            with tf.variable_scope("emb_var"), tf.device("/cpu:0"):
                self.E = tf.Variable(embeddings, trainable=emb_train)
                premise_in = emb_drop(self.E, self.premise_x)        #P
                hypothesis_in = emb_drop(self.E, self.hypothesis_x)  #H

        # Character-level embeddings -> shared 1D conv -> concat onto word embeddings.
        with tf.variable_scope("char_emb"):
            char_emb_mat = tf.get_variable("char_emb_mat", shape=[config.char_vocab_size, config.char_emb_size])
            with tf.variable_scope("char") as scope:
                char_pre = tf.nn.embedding_lookup(char_emb_mat, self.premise_char)
                char_hyp = tf.nn.embedding_lookup(char_emb_mat, self.hypothesis_char)
                filter_sizes = list(map(int, config.out_channel_dims.split(',')))  #[100]
                heights = list(map(int, config.filter_heights.split(',')))         #[5]
                assert sum(filter_sizes) == config.char_out_size, (filter_sizes, config.char_out_size)
                with tf.variable_scope("conv") as scope:
                    conv_pre = multi_conv1d(char_pre, filter_sizes, heights, "VALID", self.is_train, config.keep_rate, scope='conv')
                    scope.reuse_variables()  # premise and hypothesis share the char conv weights
                    conv_hyp = multi_conv1d(char_hyp, filter_sizes, heights, "VALID", self.is_train, config.keep_rate, scope='conv')
                    conv_pre = tf.reshape(conv_pre, [-1, self.sequence_length, config.char_out_size])
                    conv_hyp = tf.reshape(conv_hyp, [-1, self.sequence_length, config.char_out_size])
            premise_in = tf.concat([premise_in, conv_pre], axis=2)
            hypothesis_in = tf.concat([hypothesis_in, conv_hyp], axis=2)

        # syntactic information: POS one-hots and exact-match flags appended per token
        premise_in = tf.concat((premise_in, tf.cast(self.premise_pos, tf.float32)), axis=2)
        hypothesis_in = tf.concat((hypothesis_in, tf.cast(self.hypothesis_pos, tf.float32)), axis=2)
        premise_in = tf.concat([premise_in, tf.cast(self.premise_exact_match, tf.float32)], axis=2)
        hypothesis_in = tf.concat([hypothesis_in, tf.cast(self.hypothesis_exact_match, tf.float32)], axis=2)

        with tf.variable_scope("highway") as scope:
            premise_in = highway_network(premise_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)
            scope.reuse_variables()  # shared highway weights across the two sides
            hypothesis_in = highway_network(hypothesis_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)

        ## self attention process
        def model_self_attention(config, premise_in, hypothesis_in, prem_mask, hyp_mask):
            """Stack self-attention (and optional dependency) encoder layers;
            returns the encoded (p, h) pair."""
            pre = premise_in
            hyp = hypothesis_in
            for i in range(config.self_att_enc_layers):
                with tf.variable_scope(tf.get_variable_scope(), reuse=False):
                    p = self_attention_layer(config, self.is_train, pre, p_mask=prem_mask, scope="{}_layer_self_att_enc".format(i)) # [N, len, dim]
                    h = self_attention_layer(config, self.is_train, hyp, p_mask=hyp_mask, scope="{}_layer_self_att_enc_h".format(i))
                    pre = p
                    hyp = h
                    variable_summaries(p, "p_self_enc_summary_layer_{}".format(i))
                    variable_summaries(h, "h_self_enc_summary_layer_{}".format(i))
            if config.use_depend:
                # Run a parallel dependency-encoder stack and concat its output.
                pre1 = p
                hyp1 = h
                for i in range(config.denp_enc_layers):
                    with tf.variable_scope(tf.get_variable_scope(), reuse=False):
                        if config.use_depend:
                            p1 = dependency_layer(config, self.is_train, pre1, self.premise_dependency, p_mask=prem_mask, scope="{}_layer_dependency_enc".format(i))
                            h1 = dependency_layer(config, self.is_train, hyp1, self.hypothesis_dependency, p_mask=hyp_mask, scope="{}_layer_dependency_enc_h".format(i))
                            pre1 = p1
                            hyp1 = h1
                            variable_summaries(p, "p_denp_enc_summary_layer_{}".format(i))
                            variable_summaries(h, "h_denp_enc_summary_layer_{}".format(i))
                p = tf.concat([p, p1], -1)
                h = tf.concat([h, h1], -1)
            return p, h

        ## main process : interaction + dense net
        def model_one_side(config, main, support, main_length, support_length, main_mask, support_mask, scope):
            """Interaction tensor between main and support, then DenseNet features."""
            bi_att_mx = bi_attention_mx(config, self.is_train, main, support, p_mask=main_mask, h_mask=support_mask, sequence_length=self.sequence_length) # [N, PL, HL]
            bi_att_mx = tf.cond(self.is_train, lambda: tf.nn.dropout(bi_att_mx, config.keep_rate), lambda: bi_att_mx)
            out_final = dense_net(config, bi_att_mx, self.is_train)
            return out_final

        # self attention
        with tf.variable_scope("prepro") as scope:
            p, h = model_self_attention(config, premise_in, hypothesis_in, prem_mask, hyp_mask)
        # main
        with tf.variable_scope("main") as scope:
            premise_final = model_one_side(config, p, h, prem_seq_lengths, hyp_seq_lengths, prem_mask, hyp_mask, scope="premise_as_main")
            f0 = premise_final
            self.logits = linear(f0, self.pred_size ,True, bias_start=0.0, scope="logit", squeeze=False, wd=config.wd, input_keep_prob=config.keep_rate,
                                 is_train=self.is_train)
        tf.summary.histogram('logit_histogram', self.logits)

        ## Hu 2016: logic-rule teacher network built by re-running the main path
        ## (reuse=True) on rule-derived sub-inputs.
        if config.use_logic:
            minus_one = tf.Variable(-1)

            def go_through_whole_model(premise_in, hypothesis_in, config=config, prem_mask=prem_mask, hyp_mask=hyp_mask, pred_size=self.pred_size, is_train=self.is_train):
                """Re-run interaction + dense net + logit with reused variables;
                returns softmax probabilities."""
                # with tf.variable_scope("highway") as scope:
                #     premise_in = highway_network(premise_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)
                #     scope.reuse_variables()
                #     hypothesis_in = highway_network(hypothesis_in, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train)
                # self attention
                #with tf.variable_scope("prepro") as scope:
                #    p, h = model_self_attention(config, premise_in, hypothesis_in, prem_mask, hyp_mask)
                p, h = premise_in, hypothesis_in
                # main
                with tf.variable_scope("main", reuse=True) as scope:
                    premise_final = model_one_side(config, p, h, prem_seq_lengths, hyp_seq_lengths, prem_mask, hyp_mask, scope="premise_as_main")
                    f0 = premise_final
                    logits = linear(f0, pred_size ,True, bias_start=0.0, scope="logit", squeeze=False, wd=config.wd, input_keep_prob=config.keep_rate,
                                    is_train=is_train)
                logits = tf.nn.softmax(logits)
                return logits

            def cal_and_distr(sub_logits1, sub_logits2, c, and_mask, lambdal=1.):
                """
                there are two rules:
                AE: 1(y=Entailment) -> (p1_E V p2_E) ^ (p1_E V p2_E) -> 1(y=E)
                AC: 1(y=Contradiction) -> (p1_C V p2_C) ^ (p1_C V p2_C) -> 1(y=C)

                Returns the (negated, clipped) rule-regularizer log-weights per
                class; rows whose and_mask == -1 are zeroed out.
                """
                # for rule in rules:
                # if rule == 'AndE':
                p1_add_p2 = sub_logits1 + sub_logits2
                pre_distr = tf.minimum(p1_add_p2, 1) # 70x3
                r_AE_y0 = (pre_distr[:,0] + 1) / 2 # 70x1
                r_AC_y0 = (2 - pre_distr[:,2]) / 2 # 70x1
                r_AN_y0 = 1
                r_AE_y1 = (2 - pre_distr[:,0]) / 2
                r_AC_y1 = (pre_distr[:,2] + 1) / 2
                r_AN_y1 = p1_add_p2[:,1] / 2
                r_AE_y2 = r_AE_y1
                r_AC_y2 = r_AC_y0
                r_AN_y2 = 1
                r_y0 = c*lambdal* ( 3. - r_AE_y0 - r_AC_y0 - r_AN_y0) # 70x1
                r_y1 = c*lambdal* ( 3. - r_AE_y1 - r_AC_y1 - r_AN_y1) # 70x1
                r_y2 = c*lambdal* ( 3. - r_AE_y2 - r_AC_y2 - r_AN_y2) # 70x1
                r_y0 = tf.where(tf.equal(and_mask, -1), tf.zeros_like(r_y0), r_y0) # mask
                r_y1 = tf.where(tf.equal(and_mask, -1), tf.zeros_like(r_y1), r_y1)
                r_y2 = tf.where(tf.equal(and_mask, -1), tf.zeros_like(r_y2), r_y2)
                r_y0 = tf.reshape(r_y0, [-1, 1])
                r_y1 = tf.reshape(r_y1, [-1, 1])
                r_y2 = tf.reshape(r_y2, [-1, 1])
                result = - tf.concat([r_y0, r_y1, r_y2], axis=1)
                # truncate
                #distr_y0 = distr_all[:,0]
                #distr_y0 = distr_y0.reshape([distr_y0.shape[0], 1])
                #distr_y0_copies = tf.tile(distr_y0, [1, result.shape[1]])
                #result -= distr_y0_copies
                result = tf.maximum(tf.minimum(result, 60.), -60.)  # keep exp() finite
                return result

            def slice_full(index, p):
                # Split p at `index` along the token axis; each half is zero-padded
                # back to the full length.
                p1_ = tf.slice(p, [0,0], [index, -1])
                p2_ = tf.slice(p, [index,0], [-1, -1])
                p1_full = tf.concat([p1_, tf.zeros_like(p2_)], axis=0)
                p2_full = tf.concat([p2_, tf.zeros_like(p1_)], axis=0)
                return p1_full, p2_full

            def two_zero(p):
                # Placeholder halves for examples without an "and" split.
                return tf.zeros_like(p), tf.zeros_like(p)

            def slice_it_on(elems):
                """
                index : ? x 1
                p : ? x 48 x 448
                h : ? x 48 x 448
                since it is map_fn, so ? will be ignored
                """
                index, p, h = elems
                #index = index[0]
                p1, p2 = tf.cond(tf.equal(index, minus_one), lambda: two_zero(p), lambda: slice_full(index, p))
                sub_h = tf.cond(tf.equal(index, minus_one), lambda: tf.zeros_like(h), lambda: h)
                return p1, p2, sub_h

            # construct teacher network output: q(y|x) = student logits * rule weights
            q_y_x = self.logits
            p1, p2, sub_h = tf.map_fn(slice_it_on, (self.and_index, p, h), dtype=(tf.float32, tf.float32, tf.float32))
            sub_logits1 = go_through_whole_model(p1, sub_h)
            sub_logits2 = go_through_whole_model(p2, sub_h)
            c = tf.constant(config.C , dtype=tf.float32, shape=[], name='c')
            lambdal = tf.constant(config.lambdal , dtype=tf.float32, shape=[], name='lambdal')
            distr = tf.exp(cal_and_distr(sub_logits1, sub_logits2, c, self.and_index, lambdal))
            q_y_x = q_y_x * distr
            self.q_y_x = q_y_x

        # Define the cost function
        if not config.use_logic:
            self.total_cost = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.y, logits=self.logits))
            self.acc = tf.reduce_mean(tf.cast(tf.equal(tf.arg_max(self.logits, dimension=1),tf.cast(self.y,tf.int64)), tf.float32))
            tf.summary.scalar('acc', self.acc)
            tf.summary.scalar('loss', self.total_cost)
            #self.auc_ROC = tf.metrics.auc(tf.cast(self.y,tf.int64), tf.arg_max(self.logits, dimension=1), curve = 'ROC')
            #self.auc_PR = tf.metrics.auc(tf.cast(self.y,tf.int64), tf.arg_max(self.logits, dimension=1), curve = 'PR')
            #tf.summary.scalar('auc_ROC', self.auc_ROC)
            #tf.summary.scalar('auc_PR', self.auc_PR)
            # calculate acc
        else:
            # pi decays the weight of the ground-truth loss toward the teacher loss.
            get_pi = lambda x, y: x * 0.9**tf.cast(y/6750, tf.float32) # when batch size is 70: 6750; 48:9850
            pi = get_pi(config.pi, self.global_step)
            self.total_cost = (1-pi)*tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.y, logits=self.logits))
            self.total_cost += pi*tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=tf.arg_max(q_y_x, dimension=1), logits=self.logits))
            self.acc = tf.reduce_mean(tf.cast(tf.equal(tf.arg_max(self.logits, dimension=1),tf.cast(self.y,tf.int64)), tf.float32))
            tf.summary.scalar('acc', self.acc)
            tf.summary.scalar('loss', self.total_cost)

        # L2 Loss
        if config.l2_loss:
            if config.sigmoid_growing_l2loss:
                # Sigmoid schedule: ratio ramps from ~0 to full_l2_ratio around
                # the halfway step of the configured full-regularization step.
                weights_added = tf.add_n([tf.nn.l2_loss(tensor) for tensor in tf.trainable_variables() if tensor.name.endswith("weights:0") and not tensor.name.endswith("weighted_sum/weights:0") or tensor.name.endswith('kernel:0')])
                full_l2_step = tf.constant(config.weight_l2loss_step_full_reg , dtype=tf.int32, shape=[], name='full_l2reg_step')
                full_l2_ratio = tf.constant(config.l2_regularization_ratio , dtype=tf.float32, shape=[], name='l2_regularization_ratio')
                gs_flt = tf.cast(self.global_step , tf.float32)
                half_l2_step_flt = tf.cast(full_l2_step / 2 ,tf.float32)
                # (self.global_step - full_l2_step / 2)
                # tf.cast((self.global_step - full_l2_step / 2) * 8, tf.float32) / tf.cast(full_l2_step / 2 ,tf.float32)
                # l2loss_ratio = tf.sigmoid( tf.cast((self.global_step - full_l2_step / 2) * 8, tf.float32) / tf.cast(full_l2_step / 2 ,tf.float32)) * full_l2_ratio
                l2loss_ratio = tf.sigmoid( ((gs_flt - half_l2_step_flt) * 8) / half_l2_step_flt) * full_l2_ratio
                tf.summary.scalar('l2loss_ratio', l2loss_ratio)
                l2loss = weights_added * l2loss_ratio
            else:
                l2loss = tf.add_n([tf.nn.l2_loss(tensor) for tensor in tf.trainable_variables() if tensor.name.endswith("weights:0") or tensor.name.endswith('kernel:0')]) * tf.constant(config.l2_regularization_ratio , dtype='float', shape=[], name='l2_regularization_ratio')
            tf.summary.scalar('l2loss', l2loss)
            self.total_cost += l2loss

        # semantic Loss
        if config.semantic_loss:
            #semantic_loss = tf.add_n([tf.nn.l2_loss(tensor) for tensor in tf.trainable_variables()
            #                          if tensor.name.endswith("weights:0") or tensor.name.endswith('kernel:0')])
            def cal_exactly_one_loss(logits):
                """Semantic loss for the 'exactly one class' constraint over
                the three class probabilities."""
                #semantic_loss = tf.Variable(tf.zeros([], dtype=np.float32), name='semantic_loss_term')
                return tf.reduce_sum(-tf.log(logits[:,0]*(1-logits[:,1])*(1-logits[:,2]) +
                                             logits[:,1]*(1-logits[:,0])*(1-logits[:,2]) +
                                             logits[:,2]*(1-logits[:,0])*(1-logits[:,1])
                                             ))
            def cal_logic_rules_loss(rules, logits):
                # Alternative rule-driven semantic loss (currently unused here).
                def cal_logic_rule(ro, ls):
                    if ro == '0':
                        return ls[0]*(1-ls[1])*(1-ls[2])
                    elif ro == '1':
                        return ls[1]*(1-ls[0])*(1-ls[2])
                    elif ro == '2':
                        return ls[2]*(1-ls[0])*(1-ls[1])
                return -tf.log(tf.add_n([cal_logic_rule(rule_output, logits) for rule_output in rules]))
            #semantic_loss = cal_logic_rules_loss(self.rules_output, self.logits)
            # NOTE(review): if config.use_exactly_one is False, semantic_loss is
            # undefined below -- the flag is presumably always set with semantic_loss.
            if config.use_exactly_one:
                semantic_loss = cal_exactly_one_loss(self.logits)
            semantic_loss = tf.reduce_mean(semantic_loss)
            semantic_loss = semantic_loss * tf.constant(config.semantic_regularization_ratio , dtype='float', shape=[], name='semantic_regularization_ratio')
            tf.summary.scalar('semantic loss', semantic_loss)
            self.total_cost += semantic_loss

        # Encoder-diff penalty: when the premise/hypothesis encoders are not
        # shared, penalize the L2 distance between their corresponding kernels.
        if config.wo_enc_sharing or config.wo_highway_sharing_but_penalize_diff:
            diffs = []
            for i in range(config.self_att_enc_layers):
                for tensor in tf.trainable_variables():
                    print(tensor.name)
                    if tensor.name == "prepro/{}_layer_self_att_enc/self_attention/h_logits/first/kernel:0".format(i):
                        l_lg = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_attention/h_logits/first/kernel:0".format(i):
                        r_lg = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/lhs_1/kernel:0".format(i):
                        l_fg_lhs_1 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/lhs_1/kernel:0".format(i):
                        r_fg_lhs_1 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/rhs_1/kernel:0".format(i):
                        l_fg_rhs_1 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/rhs_1/kernel:0".format(i):
                        r_fg_rhs_1 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/lhs_2/kernel:0".format(i):
                        l_fg_lhs_2 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/lhs_2/kernel:0".format(i):
                        r_fg_lhs_2 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/rhs_2/kernel:0".format(i):
                        l_fg_rhs_2 = tensor
                    elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/rhs_2/kernel:0".format(i):
                        r_fg_rhs_2 = tensor
                    if config.two_gate_fuse_gate:
                        if tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/lhs_3/kernel:0".format(i):
                            l_fg_lhs_3 = tensor
                        elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/lhs_3/kernel:0".format(i):
                            r_fg_lhs_3 = tensor
                        elif tensor.name == "prepro/{}_layer_self_att_enc/self_att_fuse_gate/rhs_3/kernel:0".format(i):
                            l_fg_rhs_3 = tensor
                        elif tensor.name == "prepro/{}_layer_self_att_enc_h/self_att_fuse_gate/rhs_3/kernel:0".format(i):
                            r_fg_rhs_3 = tensor
                diffs += [l_lg - r_lg, l_fg_lhs_1 - r_fg_lhs_1, l_fg_rhs_1 - r_fg_rhs_1, l_fg_lhs_2 - r_fg_lhs_2, l_fg_rhs_2 - r_fg_rhs_2]
                if config.two_gate_fuse_gate:
                    diffs += [l_fg_lhs_3 - r_fg_lhs_3, l_fg_rhs_3 - r_fg_rhs_3]
            diff_loss = tf.add_n([tf.nn.l2_loss(tensor) for tensor in diffs]) * tf.constant(config.diff_penalty_loss_ratio , dtype='float', shape=[], name='diff_penalty_loss_ratio')
            tf.summary.scalar('diff_penalty_loss', diff_loss)
            self.total_cost += diff_loss

        self.summary = tf.summary.merge_all()

        # Count and print trainable weight/bias parameters for inspection.
        total_parameters = 0
        for v in tf.global_variables():
            if not v.name.endswith("weights:0") and not v.name.endswith("biases:0") and not v.name.endswith('kernel:0') and not v.name.endswith('bias:0'):
                continue
            print(v.name)
            # print(type(v.name))
            shape = v.get_shape().as_list()
            param_num = 1
            for dim in shape:
                param_num *= dim
            print(param_num)
            total_parameters += param_num
        print(total_parameters)
def bi_attention_mx(config, is_train, p, h, p_mask=None, h_mask=None, scope=None, wn_rel=None, sequence_length=48): #[N, L, 2d]
    """Build the dense interaction tensor between premise ``p`` and hypothesis ``h``.

    Despite the name, no attention weights are computed: the result is the
    element-wise product (and optionally difference) of every premise position
    with every hypothesis position, to be fed to the DenseNet feature extractor.

    Args:
        config: model config; reads ``use_more_interaction`` and ``concat_after_conv``.
        is_train: bool tensor; unused here, kept for interface compatibility.
        p, h: [N, L, d] encoded premise / hypothesis.
        p_mask, h_mask: unused, kept for interface compatibility (the original
            mask computation was dead code -- see note below).
        scope: variable scope name.
        wn_rel: optional [N, PL, HL, 5] WordNet relation features.
        sequence_length: fallback length when p's static dim 1 is unknown.

    Returns:
        [N, PL, HL, d'] interaction tensor.
    """
    with tf.variable_scope(scope or "dense_logit_bi_attention"):
        PL = p.get_shape().as_list()[1]
        if PL is None:  # static shape unknown -> fall back to configured length
            PL = sequence_length
        HL = h.get_shape().as_list()[1]
        # Broadcast p along the hypothesis axis and h along the premise axis.
        p_aug = tf.tile(tf.expand_dims(p, 2), [1, 1, HL, 1])
        h_aug = tf.tile(tf.expand_dims(h, 1), [1, PL, 1, 1])  # [N, PL, HL, d]
        # BUG FIX: the original built a ph_mask from p_mask/h_mask and then
        # unconditionally reset it to None; the mask was never used afterwards,
        # so that dead (and self-contradictory) code is removed.
        h_logits = p_aug * h_aug  # [N, PL, HL, d]
        if config.use_more_interaction:
            h_logits_sub = p_aug - h_aug
            h_logits = tf.concat([h_logits, h_logits_sub], -1)  # [N, PL, HL, 2d]
        if not config.concat_after_conv and wn_rel is not None:
            h_logits = tf.concat([h_logits, wn_rel], -1)  # [N, PL, HL, ...+5]
        return h_logits
def self_attention(config, is_train, p, p_mask=None, scope=None): #[N, L, 2d]
    """Token-to-token self attention over ``p``.

    Computes pairwise logits between every pair of token positions (via
    ``get_logits``) and returns the softmax-weighted sum of token vectors.

    Args:
        config: model config; reads ``wd`` and ``self_att_logit_func``.
        is_train: bool tensor for dropout inside ``get_logits``.
        p: [N, PL, d] token representations.
        p_mask: optional [N, PL, 1] validity mask; pairs where either token is
            padding are masked out of the logits.
        scope: variable scope name.

    Returns:
        [N, PL, d] self-attended representations.
    """
    with tf.variable_scope(scope or "self_attention"):
        PL = p.get_shape().as_list()[1]
        # Row view (query axis) and column view (key axis) of every token pair.
        p_aug_1 = tf.tile(tf.expand_dims(p, 2), [1, 1, PL, 1])
        p_aug_2 = tf.tile(tf.expand_dims(p, 1), [1, PL, 1, 1])  # [N, PL, PL, d]
        if p_mask is None:
            # BUG FIX: the original assigned this branch to `ph_mask`, leaving
            # `self_mask` undefined (NameError) whenever p_mask was None.
            self_mask = None
        else:
            p_mask_aug_1 = tf.reduce_any(tf.cast(tf.tile(tf.expand_dims(p_mask, 2), [1, 1, PL, 1]), tf.bool), axis=3)
            p_mask_aug_2 = tf.reduce_any(tf.cast(tf.tile(tf.expand_dims(p_mask, 1), [1, PL, 1, 1]), tf.bool), axis=3)
            self_mask = p_mask_aug_1 & p_mask_aug_2
        h_logits = get_logits([p_aug_1, p_aug_2], None, True, wd=config.wd, mask=self_mask,
                              is_train=is_train, func=config.self_att_logit_func, scope='h_logits') # [N, PL, PL]
        self_att = softsel(p_aug_2, h_logits)
        return self_att
def self_attention_layer(config, is_train, p, p_mask=None, scope=None):
    """One self-attention encoder layer: attend over ``p``, then combine the
    attended view with the input through a fuse gate (gated residual).

    Args:
        config: model config, forwarded to ``self_attention`` / ``fuse_gate``.
        is_train: bool tensor.
        p: [N, PL, d] token representations.
        p_mask: optional validity mask forwarded to ``self_attention``.
        scope: variable scope name.

    Returns:
        [N, PL, d'] fused layer output (shape determined by ``fuse_gate``).
    """
    with tf.variable_scope(scope or "self_attention_layer"):
        # (unused local PL removed)
        self_att = self_attention(config, is_train, p, p_mask=p_mask)
        print("self_att shape")
        print(self_att.get_shape())
        p0 = fuse_gate(config, is_train, p, self_att, scope="self_att_fuse_gate")
        return p0
def get_denpendency(config, is_train, p, p_denp, p_mask=None, scope=None): #[N, L, 2d]
    """Dependency-context encoding over ``p``.

    For each token, sums the representations of the tokens selected by its
    dependency row, projects both the token and that sum with separate linear
    maps, adds them, applies the optional mask, and finishes with ReLU.

    p:      [N, L, d] token representations
    p_denp: [N, L, L] dependency selection matrix -- assumed 0/1 adjacency; TODO confirm
    """
    with tf.variable_scope(scope or "get_denpendency"):
        seq_len = p.get_shape().as_list()[1]
        # Projection of each head token.
        head_proj = linear([p], config.dependency_hidden_size, True, scope='get_denpendency_wp', wd=config.wd, is_train=is_train)
        # Expand the adjacency over the feature axis and gather dependents.
        adj_expanded = tf.tile(tf.expand_dims(p_denp, 3), [1, 1, 1, config.dependency_hidden_size])
        tok_expanded = tf.tile(tf.expand_dims(p, 2), [1, 1, seq_len, 1])
        gathered = tf.to_float(adj_expanded) * tok_expanded
        child_sum = tf.reduce_sum(gathered, axis=2)  # [N, L, d]
        # Projection of the summed dependents.
        child_proj = linear([child_sum], config.dependency_hidden_size, True, scope='get_denpendency_wc', wd=config.wd, is_train=is_train)
        logits = head_proj + child_proj
        if p_mask is not None:
            logits = exp_mask(logits, p_mask)
        return tf.nn.relu(logits)
def to_one_hot(t):
    """Collapse per-token dependency index lists into a multi-hot matrix.

    t: [N, L, K] int32 indices. Returns [N, L, L] where entry (i, j) is the
    number of times position j occurs among token i's K dependency slots.
    """
    seq_len = t.get_shape().as_list()[1]
    expanded = tf.one_hot(t, depth=seq_len)  # [N, L, K, L]
    return tf.reduce_sum(expanded, axis=2)
def get_denpendency1(config, is_train, p, p_denp, p_mask=None, scope=None): #[N, L, 2d]
    """Dependency-context encoding, index-list variant of ``get_denpendency``.

    Same computation as ``get_denpendency`` except ``p_denp`` holds index
    lists ([N, L, K]) which are first expanded to a multi-hot [N, L, L]
    selection matrix via ``to_one_hot``.
    """
    with tf.variable_scope(scope or "get_denpendency"):
        seq_len = p.get_shape().as_list()[1]
        feat_dim = p.get_shape().as_list()[-1]
        adjacency = to_one_hot(p_denp)  # [N, L, L]
        # Projection of each head token.
        head_proj = linear([p], config.dependency_hidden_size, True, scope='get_denpendency_wp', wd=config.wd, is_train=is_train)
        # Expand the adjacency over the feature axis and gather dependents.
        adj_expanded = tf.tile(tf.expand_dims(adjacency, 3), [1, 1, 1, feat_dim])
        tok_expanded = tf.tile(tf.expand_dims(p, 2), [1, 1, seq_len, 1])
        child_sum = tf.reduce_sum(tf.to_float(adj_expanded) * tok_expanded, axis=2)  # [N, L, d]
        # Projection of the summed dependents.
        child_proj = linear([child_sum], config.dependency_hidden_size, True, scope='get_denpendency_wc', wd=config.wd, is_train=is_train)
        logits = head_proj + child_proj
        if p_mask is not None:
            logits = exp_mask(logits, p_mask)
        return tf.nn.relu(logits)
def dependency_layer(config, is_train, p, p_denp, p_mask=None, scope=None):
    """One dependency encoder layer: wraps ``get_denpendency1``.

    Args:
        config: model config, forwarded to ``get_denpendency1``.
        is_train: bool tensor.
        p: [N, L, d] token representations.
        p_denp: [N, L, K] dependency index lists.
        p_mask: optional validity mask.
        scope: variable scope name.

    Returns:
        [N, L, dependency_hidden_size] dependency encoding.
    """
    with tf.variable_scope(scope or "dependency_layer"):
        # (unused local PL removed)
        denp = get_denpendency1(config, is_train, p, p_denp, p_mask=p_mask)
        print("dependency shape")
        print(denp.get_shape())
        # The fuse-gate combination is intentionally disabled; the dependency
        # encoding is returned as-is.
        #p0 = fuse_gate(config, is_train, p, denp, scope="dependency_fuse_gate")
        return denp
# def bi_attention(config, is_train, p, h, p_mask=None, h_mask=None, scope=None, h_value = None): #[N, L, 2d]
# with tf.variable_scope(scope or "bi_attention"):
# PL = tf.shape(p)[1]
# HL = tf.shape(h)[1]
# p_aug = tf.tile(tf.expand_dims(p, 2), [1,1,HL,1])
# h_aug = tf.tile(tf.expand_dims(h, 1), [1,PL,1,1]) #[N, PL, HL, 2d]
# if p_mask is None:
# ph_mask = None
# else:
# p_mask_aug = tf.reduce_any(tf.cast(tf.tile(tf.expand_dims(p_mask, 2), [1, 1, HL, 1]), tf.bool), axis=3)
# h_mask_aug = tf.reduce_any(tf.cast(tf.tile(tf.expand_dims(h_mask, 1), [1, PL, 1, 1]), tf.bool), axis=3)
# ph_mask = p_mask_aug & h_mask_aug
# h_logits = get_logits([p_aug, h_aug], None, True, wd=config.wd, mask=ph_mask,
# is_train=is_train, func="mul_linear", scope='h_logits') # [N, PL, HL]
# h_a = softsel(h_aug, h_logits)
# p_a = softsel(p, tf.reduce_max(h_logits, 2)) # [N, 2d]
# p_a = tf.tile(tf.expand_dims(p_a, 1), [1, PL, 1]) #
# return h_a, p_a
def dense_net(config, denseAttention, is_train, wn_rel=None):
    """DenseNet feature extractor over the interaction tensor.

    Applies a 1x1 scale-down convolution, optionally concatenates WordNet
    relation features, runs three (dense block, transition layer) stages, and
    flattens the final feature map per example.
    """
    with tf.variable_scope("dense_net"):
        dim = denseAttention.get_shape().as_list()[-1]
        print('denset net dim: %s' % dim)
        act = tf.nn.relu if config.first_scale_down_layer_relu else None
        # Initial channel scale-down.
        fm = tf.contrib.layers.convolution2d(denseAttention, int(dim * config.dense_net_first_scale_down_ratio), config.first_scale_down_kernel, padding="SAME", activation_fn=act)
        if config.concat_after_conv and wn_rel is not None:
            fm = tf.concat([fm, wn_rel], -1)  # [N, PL, HL, 2d*scale_down_ratio+5]
        # Three dense-block / transition-layer stages, same order and scope
        # names as the original hand-unrolled sequence.
        stage_scopes = [
            ("first_dense_net_block", "second_transition_layer"),
            ("second_dense_net_block", "third_transition_layer"),
            ("third_dense_net_block", "fourth_transition_layer"),
        ]
        for block_scope, transition_scope in stage_scopes:
            fm = dense_net_block(config, fm, config.dense_net_growth_rate, config.dense_net_layers, config.dense_net_kernel_size, is_train, scope=block_scope)
            fm = dense_net_transition_layer(config, fm, config.dense_net_transition_rate, scope=transition_scope)
        shape_list = fm.get_shape().as_list()
        print(shape_list)
        return tf.reshape(fm, [-1, shape_list[1] * shape_list[2] * shape_list[3]])
def dense_net_block(config, feature_map, growth_rate, layers, kernel_size, is_train, padding="SAME", act=tf.nn.relu, scope=None):
    """One DenseNet block: each conv layer sees the channel-concatenation of
    the input plus every previous layer's output.

    Args:
        config: model config (unused here, kept for interface consistency).
        feature_map: [N, H, W, C] input feature map.
        growth_rate: output channels added per conv layer.
        layers: number of conv layers in the block.
        kernel_size: square conv kernel side length.
        is_train: bool tensor (unused here, kept for interface consistency).
        padding: conv padding mode.
        act: activation function for each conv.
        scope: variable scope name.

    Returns:
        [N, H, W, C + layers*growth_rate] densely-connected feature map.
    """
    with tf.variable_scope(scope or "dense_net_block"):
        conv2d = tf.contrib.layers.convolution2d
        # (unused local `dim` removed)
        list_of_features = [feature_map]
        features = feature_map
        for i in range(layers):
            ft = conv2d(features, growth_rate, (kernel_size, kernel_size), padding=padding, activation_fn=act)
            list_of_features.append(ft)
            # Dense connectivity: next layer consumes everything so far.
            features = tf.concat(list_of_features, axis=3)
        print("dense net block out shape")
        print(features.get_shape().as_list())
        return features
def dense_net_transition_layer(config, feature_map, transition_rate, scope=None):
    """DenseNet transition: 1x1-conv channel compression by ``transition_rate``
    followed by non-overlapping 2x2 max pooling."""
    with tf.variable_scope(scope or "transition_layer"):
        reduced_dim = int(feature_map.get_shape().as_list()[-1] * transition_rate)
        compressed = tf.contrib.layers.convolution2d(feature_map, reduced_dim, 1, padding="SAME", activation_fn=None)
        pooled = tf.nn.max_pool(compressed, [1, 2, 2, 1], [1, 2, 2, 1], "VALID")
        print("Transition Layer out shape")
        print(pooled.get_shape().as_list())
        return pooled
| 55.427851
| 274
| 0.60032
| 7,905
| 54,929
| 3.868817
| 0.049209
| 0.018311
| 0.01648
| 0.022365
| 0.929994
| 0.919923
| 0.911062
| 0.905405
| 0.8988
| 0.896446
| 0
| 0.030564
| 0.277485
| 54,929
| 990
| 275
| 55.483838
| 0.740041
| 0.122595
| 0
| 0.85474
| 0
| 0
| 0.084065
| 0.055987
| 0
| 0
| 0
| 0
| 0.003058
| 1
| 0.051988
| false
| 0
| 0.007645
| 0.006116
| 0.117737
| 0.027523
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f40160841c015fe4bf027245ece92b0590bd28c
| 59,632
|
py
|
Python
|
tests/contrib/test_pydantic.py
|
spacemanspiff2007/tortoise-orm
|
2591bd1cae75236779c21be559bf191fcc41ffe2
|
[
"Apache-2.0"
] | null | null | null |
tests/contrib/test_pydantic.py
|
spacemanspiff2007/tortoise-orm
|
2591bd1cae75236779c21be559bf191fcc41ffe2
|
[
"Apache-2.0"
] | null | null | null |
tests/contrib/test_pydantic.py
|
spacemanspiff2007/tortoise-orm
|
2591bd1cae75236779c21be559bf191fcc41ffe2
|
[
"Apache-2.0"
] | null | null | null |
import copy
from tests.testmodels import Address, Employee, Event, JSONFields, Reporter, Team, Tournament
from tortoise.contrib import test
from tortoise.contrib.pydantic import pydantic_model_creator, pydantic_queryset_creator
class TestPydantic(test.TestCase):
async def asyncSetUp(self) -> None:
await super(TestPydantic, self).asyncSetUp()
self.Event_Pydantic = pydantic_model_creator(Event)
self.Event_Pydantic_List = pydantic_queryset_creator(Event)
self.Tournament_Pydantic = pydantic_model_creator(Tournament)
self.Team_Pydantic = pydantic_model_creator(Team)
self.Address_Pydantic = pydantic_model_creator(Address)
class PydanticMetaOverride:
backward_relations = False
self.Event_Pydantic_non_backward = pydantic_model_creator(
Event, meta_override=PydanticMetaOverride, name="Event_non_backward"
)
self.tournament = await Tournament.create(name="New Tournament")
self.reporter = await Reporter.create(name="The Reporter")
self.event = await Event.create(
name="Test", tournament=self.tournament, reporter=self.reporter
)
self.event2 = await Event.create(name="Test2", tournament=self.tournament)
self.address = await Address.create(city="Santa Monica", street="Ocean", event=self.event)
self.team1 = await Team.create(name="Onesies")
self.team2 = await Team.create(name="T-Shirts")
await self.event.participants.add(self.team1, self.team2)
await self.event2.participants.add(self.team1, self.team2)
self.maxDiff = None
async def test_backward_relations(self):
event_schema = copy.deepcopy(dict(self.Event_Pydantic.schema()))
event_non_backward_schema = copy.deepcopy(dict(self.Event_Pydantic_non_backward.schema()))
self.assertTrue("address" in event_schema["properties"])
self.assertFalse("address" in event_non_backward_schema["properties"])
del event_schema["properties"]["address"]
self.assertEqual(event_schema["properties"], event_non_backward_schema["properties"])
def test_event_schema(self):
self.assertEqual(
self.Event_Pydantic.schema(),
{
"title": "Event",
"description": "Events on the calendar",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"name": {"title": "Name", "description": "The name", "type": "string"},
"tournament": {
"title": "Tournament",
"description": "What tournaments is a happenin'",
"allOf": [{"$ref": "#/definitions/tests.testmodels.Tournament.leaf"}],
},
"reporter": {
"title": "Reporter",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Reporter.leaf"}],
},
"participants": {
"title": "Participants",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Team.leaf"},
},
"modified": {
"title": "Modified",
"readOnly": True,
"type": "string",
"format": "date-time",
},
"token": {"title": "Token", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"address": {
"title": "Address",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Address.leaf"}],
},
},
"required": ["event_id", "name", "tournament", "participants", "modified"],
"additionalProperties": False,
"definitions": {
"tests.testmodels.Tournament.leaf": {
"title": "Tournament",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 32767,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 255, "type": "string"},
"desc": {"title": "Desc", "nullable": True, "type": "string"},
"created": {
"title": "Created",
"readOnly": True,
"type": "string",
"format": "date-time",
},
},
"required": ["id", "name", "created"],
"additionalProperties": False,
},
"tests.testmodels.Reporter.leaf": {
"title": "Reporter",
"description": "Whom is assigned as the reporter",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Team.leaf": {
"title": "Team",
"description": "Team that is a playing",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Address.leaf": {
"title": "Address",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"city": {"title": "City", "maxLength": 64, "type": "string"},
"street": {"title": "Street", "maxLength": 128, "type": "string"},
},
"required": ["city", "street", "event_id"],
"additionalProperties": False,
},
},
},
)
def test_eventlist_schema(self):
self.assertEqual(
self.Event_Pydantic_List.schema(),
{
"title": "Event_list",
"description": "Events on the calendar",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Event"},
"definitions": {
"tests.testmodels.Tournament.leaf": {
"title": "Tournament",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 32767,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 255, "type": "string"},
"desc": {"title": "Desc", "nullable": True, "type": "string"},
"created": {
"title": "Created",
"readOnly": True,
"type": "string",
"format": "date-time",
},
},
"required": ["id", "name", "created"],
"additionalProperties": False,
},
"tests.testmodels.Reporter.leaf": {
"title": "Reporter",
"description": "Whom is assigned as the reporter",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Team.leaf": {
"title": "Team",
"description": "Team that is a playing",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Address.leaf": {
"title": "Address",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"city": {"title": "City", "maxLength": 64, "type": "string"},
"street": {"title": "Street", "maxLength": 128, "type": "string"},
},
"required": ["city", "street", "event_id"],
"additionalProperties": False,
},
"tests.testmodels.Event": {
"title": "Event",
"description": "Events on the calendar",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"name": {"title": "Name", "description": "The name", "type": "string"},
"tournament": {
"title": "Tournament",
"description": "What tournaments is a happenin'",
"allOf": [
{"$ref": "#/definitions/tests.testmodels.Tournament.leaf"}
],
},
"reporter": {
"title": "Reporter",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Reporter.leaf"}],
},
"participants": {
"title": "Participants",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Team.leaf"},
},
"modified": {
"title": "Modified",
"readOnly": True,
"type": "string",
"format": "date-time",
},
"token": {"title": "Token", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"address": {
"title": "Address",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Address.leaf"}],
},
},
"required": ["event_id", "name", "tournament", "participants", "modified"],
"additionalProperties": False,
},
},
},
)
def test_address_schema(self):
self.assertEqual(
self.Address_Pydantic.schema(),
{
"title": "Address",
"type": "object",
"properties": {
"city": {"title": "City", "maxLength": 64, "type": "string"},
"street": {"title": "Street", "maxLength": 128, "type": "string"},
"event": {
"title": "Event",
"allOf": [{"$ref": "#/definitions/tests.testmodels.Event.orhjcw"}],
},
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
},
"required": ["city", "street", "event", "event_id"],
"additionalProperties": False,
"definitions": {
"tests.testmodels.Tournament.leaf": {
"title": "Tournament",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 32767,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 255, "type": "string"},
"desc": {"title": "Desc", "nullable": True, "type": "string"},
"created": {
"title": "Created",
"readOnly": True,
"type": "string",
"format": "date-time",
},
},
"required": ["id", "name", "created"],
"additionalProperties": False,
},
"tests.testmodels.Reporter.leaf": {
"title": "Reporter",
"description": "Whom is assigned as the reporter",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Team.leaf": {
"title": "Team",
"description": "Team that is a playing",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Event.orhjcw": {
"title": "Event",
"description": "Events on the calendar",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"name": {"title": "Name", "description": "The name", "type": "string"},
"tournament": {
"title": "Tournament",
"description": "What tournaments is a happenin'",
"allOf": [
{"$ref": "#/definitions/tests.testmodels.Tournament.leaf"}
],
},
"reporter": {
"title": "Reporter",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Reporter.leaf"}],
},
"participants": {
"title": "Participants",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Team.leaf"},
},
"modified": {
"title": "Modified",
"readOnly": True,
"type": "string",
"format": "date-time",
},
"token": {"title": "Token", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
},
"required": ["event_id", "name", "tournament", "participants", "modified"],
"additionalProperties": False,
},
},
},
)
def test_tournament_schema(self):
self.assertEqual(
self.Tournament_Pydantic.schema(),
{
"title": "Tournament",
"type": "object",
"properties": {
"id": {"title": "Id", "minimum": 1, "maximum": 32767, "type": "integer"},
"name": {"title": "Name", "maxLength": 255, "type": "string"},
"desc": {"title": "Desc", "nullable": True, "type": "string"},
"created": {
"title": "Created",
"readOnly": True,
"type": "string",
"format": "date-time",
},
"events": {
"title": "Events",
"description": "What tournaments is a happenin'",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Event.b4oydv"},
},
},
"required": ["id", "name", "created", "events"],
"additionalProperties": False,
"definitions": {
"tests.testmodels.Reporter.leaf": {
"title": "Reporter",
"description": "Whom is assigned as the reporter",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Team.leaf": {
"title": "Team",
"description": "Team that is a playing",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Address.leaf": {
"title": "Address",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"city": {"title": "City", "maxLength": 64, "type": "string"},
"street": {"title": "Street", "maxLength": 128, "type": "string"},
},
"required": ["city", "street", "event_id"],
"additionalProperties": False,
},
"tests.testmodels.Event.b4oydv": {
"title": "Event",
"description": "Events on the calendar",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"name": {"title": "Name", "description": "The name", "type": "string"},
"reporter": {
"title": "Reporter",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Reporter.leaf"}],
},
"participants": {
"title": "Participants",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Team.leaf"},
},
"modified": {
"title": "Modified",
"readOnly": True,
"type": "string",
"format": "date-time",
},
"token": {"title": "Token", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"address": {
"title": "Address",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Address.leaf"}],
},
},
"required": ["event_id", "name", "participants", "modified"],
"additionalProperties": False,
},
},
},
)
def test_team_schema(self):
self.assertEqual(
self.Team_Pydantic.schema(),
{
"title": "Team",
"description": "Team that is a playing",
"type": "object",
"properties": {
"id": {"title": "Id", "minimum": 1, "maximum": 2147483647, "type": "integer"},
"name": {"title": "Name", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"events": {
"title": "Events",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Event.dlqoeq"},
},
},
"required": ["id", "name", "events"],
"additionalProperties": False,
"definitions": {
"tests.testmodels.Tournament.leaf": {
"title": "Tournament",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 32767,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 255, "type": "string"},
"desc": {"title": "Desc", "nullable": True, "type": "string"},
"created": {
"title": "Created",
"readOnly": True,
"type": "string",
"format": "date-time",
},
},
"required": ["id", "name", "created"],
"additionalProperties": False,
},
"tests.testmodels.Reporter.leaf": {
"title": "Reporter",
"description": "Whom is assigned as the reporter",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "type": "string"},
},
"required": ["id", "name"],
"additionalProperties": False,
},
"tests.testmodels.Address.leaf": {
"title": "Address",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"city": {"title": "City", "maxLength": 64, "type": "string"},
"street": {"title": "Street", "maxLength": 128, "type": "string"},
},
"required": ["city", "street", "event_id"],
"additionalProperties": False,
},
"tests.testmodels.Event.dlqoeq": {
"title": "Event",
"description": "Events on the calendar",
"type": "object",
"properties": {
"event_id": {
"title": "Event Id",
"minimum": 1,
"maximum": 9223372036854775807,
"type": "integer",
},
"name": {"title": "Name", "description": "The name", "type": "string"},
"tournament": {
"title": "Tournament",
"description": "What tournaments is a happenin'",
"allOf": [
{"$ref": "#/definitions/tests.testmodels.Tournament.leaf"}
],
},
"reporter": {
"title": "Reporter",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Reporter.leaf"}],
},
"modified": {
"title": "Modified",
"readOnly": True,
"type": "string",
"format": "date-time",
},
"token": {"title": "Token", "type": "string"},
"alias": {
"title": "Alias",
"minimum": -2147483648,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"address": {
"title": "Address",
"nullable": True,
"allOf": [{"$ref": "#/definitions/tests.testmodels.Address.leaf"}],
},
},
"required": ["event_id", "name", "tournament", "modified"],
"additionalProperties": False,
},
},
},
)
async def test_eventlist(self):
eventlp = await self.Event_Pydantic_List.from_queryset(Event.all())
# print(eventlp.json(indent=4))
eventldict = eventlp.dict()["__root__"]
# Remove timestamps
del eventldict[0]["modified"]
del eventldict[0]["tournament"]["created"]
del eventldict[1]["modified"]
del eventldict[1]["tournament"]["created"]
self.assertEqual(
eventldict,
[
{
"event_id": self.event.event_id,
"name": "Test",
# "modified": "2020-01-28T10:43:50.901562",
"token": self.event.token,
"alias": None,
"tournament": {
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
# "created": "2020-01-28T10:43:50.900664"
},
"reporter": {"id": self.reporter.id, "name": "The Reporter"},
"participants": [
{"id": self.team1.id, "name": "Onesies", "alias": None},
{"id": self.team2.id, "name": "T-Shirts", "alias": None},
],
"address": {
"event_id": self.address.pk,
"city": "Santa Monica",
"street": "Ocean",
},
},
{
"event_id": self.event2.event_id,
"name": "Test2",
# "modified": "2020-01-28T10:43:50.901562",
"token": self.event2.token,
"alias": None,
"tournament": {
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
# "created": "2020-01-28T10:43:50.900664"
},
"reporter": None,
"participants": [
{"id": self.team1.id, "name": "Onesies", "alias": None},
{"id": self.team2.id, "name": "T-Shirts", "alias": None},
],
"address": None,
},
],
)
async def test_event(self):
eventp = await self.Event_Pydantic.from_tortoise_orm(await Event.get(name="Test"))
# print(eventp.json(indent=4))
eventdict = eventp.dict()
# Remove timestamps
del eventdict["modified"]
del eventdict["tournament"]["created"]
self.assertEqual(
eventdict,
{
"event_id": self.event.event_id,
"name": "Test",
# "modified": "2020-01-28T10:43:50.901562",
"token": self.event.token,
"alias": None,
"tournament": {
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
# "created": "2020-01-28T10:43:50.900664"
},
"reporter": {"id": self.reporter.id, "name": "The Reporter"},
"participants": [
{"id": self.team1.id, "name": "Onesies", "alias": None},
{"id": self.team2.id, "name": "T-Shirts", "alias": None},
],
"address": {"event_id": self.address.pk, "city": "Santa Monica", "street": "Ocean"},
},
)
async def test_address(self):
addressp = await self.Address_Pydantic.from_tortoise_orm(await Address.get(street="Ocean"))
# print(addressp.json(indent=4))
addressdict = addressp.dict()
# Remove timestamps
del addressdict["event"]["tournament"]["created"]
del addressdict["event"]["modified"]
self.assertEqual(
addressdict,
{
"city": "Santa Monica",
"street": "Ocean",
"event": {
"event_id": self.event.event_id,
"name": "Test",
"tournament": {
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
},
"reporter": {"id": self.reporter.id, "name": "The Reporter"},
"participants": [
{"id": self.team1.id, "name": "Onesies", "alias": None},
{"id": self.team2.id, "name": "T-Shirts", "alias": None},
],
"token": self.event.token,
"alias": None,
},
"event_id": self.address.event_id,
},
)
async def test_tournament(self):
tournamentp = await self.Tournament_Pydantic.from_tortoise_orm(
await Tournament.all().first()
)
# print(tournamentp.json(indent=4))
tournamentdict = tournamentp.dict()
# Remove timestamps
del tournamentdict["events"][0]["modified"]
del tournamentdict["events"][1]["modified"]
del tournamentdict["created"]
self.assertEqual(
tournamentdict,
{
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
# "created": "2020-01-28T19:41:38.059617",
"events": [
{
"event_id": self.event.event_id,
"name": "Test",
# "modified": "2020-01-28T19:41:38.060070",
"token": self.event.token,
"alias": None,
"reporter": {"id": self.reporter.id, "name": "The Reporter"},
"participants": [
{"id": self.team1.id, "name": "Onesies", "alias": None},
{"id": self.team2.id, "name": "T-Shirts", "alias": None},
],
"address": {
"event_id": self.address.pk,
"city": "Santa Monica",
"street": "Ocean",
},
},
{
"event_id": self.event2.event_id,
"name": "Test2",
# "modified": "2020-01-28T19:41:38.060070",
"token": self.event2.token,
"alias": None,
"reporter": None,
"participants": [
{"id": self.team1.id, "name": "Onesies", "alias": None},
{"id": self.team2.id, "name": "T-Shirts", "alias": None},
],
"address": None,
},
],
},
)
async def test_team(self):
teamp = await self.Team_Pydantic.from_tortoise_orm(await Team.get(id=self.team1.id))
# print(teamp.json(indent=4))
teamdict = teamp.dict()
# Remove timestamps
del teamdict["events"][0]["modified"]
del teamdict["events"][0]["tournament"]["created"]
del teamdict["events"][1]["modified"]
del teamdict["events"][1]["tournament"]["created"]
self.assertEqual(
teamdict,
{
"id": self.team1.id,
"name": "Onesies",
"alias": None,
"events": [
{
"event_id": self.event.event_id,
"name": "Test",
# "modified": "2020-01-28T19:47:03.334077",
"token": self.event.token,
"alias": None,
"tournament": {
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
# "created": "2020-01-28T19:41:38.059617",
},
"reporter": {"id": self.reporter.id, "name": "The Reporter"},
"address": {
"event_id": self.address.pk,
"city": "Santa Monica",
"street": "Ocean",
},
},
{
"event_id": self.event2.event_id,
"name": "Test2",
# "modified": "2020-01-28T19:47:03.334077",
"token": self.event2.token,
"alias": None,
"tournament": {
"id": self.tournament.id,
"name": "New Tournament",
"desc": None,
# "created": "2020-01-28T19:41:38.059617",
},
"reporter": None,
"address": None,
},
],
},
)
def test_event_named(self):
Event_Named = pydantic_model_creator(Event, name="Foo")
schema = Event_Named.schema()
self.assertEqual(schema["title"], "Foo")
self.assertSetEqual(
set(schema["properties"].keys()),
{
"address",
"alias",
"event_id",
"modified",
"name",
"participants",
"reporter",
"token",
"tournament",
},
)
def test_event_sorted(self):
Event_Named = pydantic_model_creator(Event, sort_alphabetically=True)
schema = Event_Named.schema()
self.assertEqual(
list(schema["properties"].keys()),
[
"address",
"alias",
"event_id",
"modified",
"name",
"participants",
"reporter",
"token",
"tournament",
],
)
def test_event_unsorted(self):
Event_Named = pydantic_model_creator(Event, sort_alphabetically=False)
schema = Event_Named.schema()
self.assertEqual(
list(schema["properties"].keys()),
[
"event_id",
"name",
"tournament",
"reporter",
"participants",
"modified",
"token",
"alias",
"address",
],
)
async def test_json_field(self):
json_field_0 = await JSONFields.create(data={"a": 1})
json_field_1 = await JSONFields.create(data=[{"a": 1, "b": 2}])
json_field_0_get = await JSONFields.get(pk=json_field_0.pk)
json_field_1_get = await JSONFields.get(pk=json_field_1.pk)
creator = pydantic_model_creator(JSONFields)
ret0 = creator.from_orm(json_field_0_get).dict()
self.assertEqual(
ret0,
{
"id": 1,
"data": {"a": 1},
"data_null": None,
"data_default": {"a": 1},
"data_validate": None,
},
)
ret1 = creator.from_orm(json_field_1_get).dict()
self.assertEqual(
ret1,
{
"id": 2,
"data": [{"a": 1, "b": 2}],
"data_null": None,
"data_default": {"a": 1},
"data_validate": None,
},
)
class TestPydanticCycle(test.TestCase):
async def asyncSetUp(self) -> None:
await super(TestPydanticCycle, self).asyncSetUp()
self.Employee_Pydantic = pydantic_model_creator(Employee)
self.root = await Employee.create(name="Root")
self.loose = await Employee.create(name="Loose")
self._1 = await Employee.create(name="1. First H1", manager=self.root)
self._2 = await Employee.create(name="2. Second H1", manager=self.root)
self._1_1 = await Employee.create(name="1.1. First H2", manager=self._1)
self._1_1_1 = await Employee.create(name="1.1.1. First H3", manager=self._1_1)
self._2_1 = await Employee.create(name="2.1. Second H2", manager=self._2)
self._2_2 = await Employee.create(name="2.2. Third H2", manager=self._2)
await self._1.talks_to.add(self._2, self._1_1_1, self.loose)
await self._2_1.gets_talked_to.add(self._2_2, self._1_1, self.loose)
self.maxDiff = None
def test_schema(self):
self.assertEqual(
self.Employee_Pydantic.schema(),
{
"title": "Employee",
"type": "object",
"properties": {
"id": {"title": "Id", "minimum": 1, "maximum": 2147483647, "type": "integer"},
"name": {"title": "Name", "maxLength": 50, "type": "string"},
"talks_to": {
"title": "Talks To",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Employee.5gupxf"},
},
"manager_id": {
"title": "Manager Id",
"minimum": 1,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"team_members": {
"title": "Team Members",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Employee.4fgkwn"},
},
"name_length": {"title": "Name Length", "type": "integer"},
"team_size": {
"title": "Team Size",
"description": "Computes team size.<br/><br/>Note that this function needs to be annotated with a return type so that pydantic can<br/> generate a valid schema.<br/><br/>Note that the pydantic serializer can't call async methods, but the tortoise helpers<br/> pre-fetch relational data, so that it is available before serialization. So we don't<br/> need to await the relation. We do however have to protect against the case where no<br/> prefetching was done, hence catching and handling the<br/> ``tortoise.exceptions.NoValuesFetched`` exception.",
"type": "integer",
},
},
"required": ["id", "name", "talks_to", "team_members", "name_length", "team_size"],
"additionalProperties": False,
"definitions": {
"tests.testmodels.Employee.leaf": {
"title": "Employee",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 50, "type": "string"},
"manager_id": {
"title": "Manager Id",
"minimum": 1,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"name_length": {"title": "Name Length", "type": "integer"},
"team_size": {
"title": "Team Size",
"description": "Computes team size.<br/><br/>Note that this function needs to be annotated with a return type so that pydantic can<br/> generate a valid schema.<br/><br/>Note that the pydantic serializer can't call async methods, but the tortoise helpers<br/> pre-fetch relational data, so that it is available before serialization. So we don't<br/> need to await the relation. We do however have to protect against the case where no<br/> prefetching was done, hence catching and handling the<br/> ``tortoise.exceptions.NoValuesFetched`` exception.",
"type": "integer",
},
},
"required": ["id", "name", "name_length", "team_size"],
"additionalProperties": False,
},
"tests.testmodels.Employee.5gupxf": {
"title": "Employee",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 50, "type": "string"},
"talks_to": {
"title": "Talks To",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Employee.leaf"},
},
"manager_id": {
"title": "Manager Id",
"minimum": 1,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"team_members": {
"title": "Team Members",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Employee.leaf"},
},
"name_length": {"title": "Name Length", "type": "integer"},
"team_size": {
"title": "Team Size",
"description": "Computes team size.<br/><br/>Note that this function needs to be annotated with a return type so that pydantic can<br/> generate a valid schema.<br/><br/>Note that the pydantic serializer can't call async methods, but the tortoise helpers<br/> pre-fetch relational data, so that it is available before serialization. So we don't<br/> need to await the relation. We do however have to protect against the case where no<br/> prefetching was done, hence catching and handling the<br/> ``tortoise.exceptions.NoValuesFetched`` exception.",
"type": "integer",
},
},
"required": [
"id",
"name",
"talks_to",
"team_members",
"name_length",
"team_size",
],
"additionalProperties": False,
},
"tests.testmodels.Employee.4fgkwn": {
"title": "Employee",
"type": "object",
"properties": {
"id": {
"title": "Id",
"minimum": 1,
"maximum": 2147483647,
"type": "integer",
},
"name": {"title": "Name", "maxLength": 50, "type": "string"},
"talks_to": {
"title": "Talks To",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Employee.leaf"},
},
"manager_id": {
"title": "Manager Id",
"minimum": 1,
"maximum": 2147483647,
"nullable": True,
"type": "integer",
},
"team_members": {
"title": "Team Members",
"type": "array",
"items": {"$ref": "#/definitions/tests.testmodels.Employee.leaf"},
},
"name_length": {"title": "Name Length", "type": "integer"},
"team_size": {
"title": "Team Size",
"description": "Computes team size.<br/><br/>Note that this function needs to be annotated with a return type so that pydantic can<br/> generate a valid schema.<br/><br/>Note that the pydantic serializer can't call async methods, but the tortoise helpers<br/> pre-fetch relational data, so that it is available before serialization. So we don't<br/> need to await the relation. We do however have to protect against the case where no<br/> prefetching was done, hence catching and handling the<br/> ``tortoise.exceptions.NoValuesFetched`` exception.",
"type": "integer",
},
},
"required": [
"id",
"name",
"talks_to",
"team_members",
"name_length",
"team_size",
],
"additionalProperties": False,
},
},
},
)
async def test_serialisation(self):
empp = await self.Employee_Pydantic.from_tortoise_orm(await Employee.get(name="Root"))
# print(empp.json(indent=4))
empdict = empp.dict()
self.assertEqual(
empdict,
{
"id": self.root.id,
"manager_id": None,
"name": "Root",
"talks_to": [],
"team_members": [
{
"id": self._1.id,
"manager_id": self.root.id,
"name": "1. First H1",
"talks_to": [
{
"id": self.loose.id,
"manager_id": None,
"name": "Loose",
"name_length": 5,
"team_size": 0,
},
{
"id": self._2.id,
"manager_id": self.root.id,
"name": "2. Second H1",
"name_length": 12,
"team_size": 0,
},
{
"id": self._1_1_1.id,
"manager_id": self._1_1.id,
"name": "1.1.1. First H3",
"name_length": 15,
"team_size": 0,
},
],
"team_members": [
{
"id": self._1_1.id,
"manager_id": self._1.id,
"name": "1.1. First H2",
"name_length": 13,
"team_size": 0,
}
],
"name_length": 11,
"team_size": 1,
},
{
"id": self._2.id,
"manager_id": self.root.id,
"name": "2. Second H1",
"talks_to": [],
"team_members": [
{
"id": self._2_1.id,
"manager_id": self._2.id,
"name": "2.1. Second H2",
"name_length": 14,
"team_size": 0,
},
{
"id": self._2_2.id,
"manager_id": self._2.id,
"name": "2.2. Third H2",
"name_length": 13,
"team_size": 0,
},
],
"name_length": 12,
"team_size": 2,
},
],
"name_length": 4,
"team_size": 2,
},
)
| 46.047876
| 582
| 0.344077
| 3,764
| 59,632
| 5.369554
| 0.066153
| 0.019296
| 0.016328
| 0.027757
| 0.822473
| 0.779872
| 0.749245
| 0.733116
| 0.716343
| 0.694671
| 0
| 0.039899
| 0.530521
| 59,632
| 1,294
| 583
| 46.083462
| 0.682026
| 0.013499
| 0
| 0.676686
| 0
| 0.003249
| 0.249787
| 0.035883
| 0
| 0
| 0
| 0
| 0.017059
| 1
| 0.007311
| false
| 0
| 0.003249
| 0
| 0.012998
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f4a90ba54ecb1ef7e942ab6ad2ff54433503bd2
| 1,805
|
py
|
Python
|
test/test_monitor_running_process.py
|
etcher-be/elib_run
|
b0c4037898e16d88658e7cb263be78be46712f77
|
[
"MIT"
] | null | null | null |
test/test_monitor_running_process.py
|
etcher-be/elib_run
|
b0c4037898e16d88658e7cb263be78be46712f77
|
[
"MIT"
] | 25
|
2018-10-23T05:17:00.000Z
|
2019-06-02T17:27:29.000Z
|
test/test_monitor_running_process.py
|
theendsofinvention/elib_run
|
b0c4037898e16d88658e7cb263be78be46712f77
|
[
"MIT"
] | 1
|
2019-05-28T17:57:46.000Z
|
2019-05-28T17:57:46.000Z
|
# coding=utf-8
import pytest
from mockito import mock, verify, when
# noinspection PyProtectedMember
from elib_run._run import _monitor_running_process
def test_monitor_running_process_poll():
context = mock()
context.command = mock({'returncode': 0})
when(_monitor_running_process).capture_output_from_running_process(context)
when(context).process_finished().thenReturn(True)
when(context).process_timed_out()
_monitor_running_process.monitor_running_process(context)
assert 0 is context.return_code
verify(_monitor_running_process)
when(context).process_finished()
verify(context, times=0).process_timed_out()
def test_monitor_running_process_break():
context = mock()
context.command = mock({'returncode': 0})
when(_monitor_running_process).capture_output_from_running_process(context)
when(context).process_finished().thenReturn(False).thenReturn(False).thenReturn(True)
when(context).process_timed_out().thenReturn(False)
_monitor_running_process.monitor_running_process(context)
assert 0 is context.return_code
verify(_monitor_running_process)
when(context).process_finished()
verify(context, times=2).process_timed_out()
def test_monitor_running_process_timeout():
context = mock()
context.command = mock({'returncode': 0})
when(_monitor_running_process).capture_output_from_running_process(context)
when(context).process_finished().thenReturn(False)
when(context).process_timed_out().thenReturn(True)
with pytest.raises(_monitor_running_process.ProcessTimeoutError):
_monitor_running_process.monitor_running_process(context)
assert -1 is context.return_code
verify(_monitor_running_process)
when(context).process_finished()
verify(context).process_timed_out()
| 37.604167
| 89
| 0.782271
| 220
| 1,805
| 6.027273
| 0.2
| 0.211161
| 0.269231
| 0.117647
| 0.821267
| 0.800151
| 0.76546
| 0.71267
| 0.606335
| 0.606335
| 0
| 0.005668
| 0.120222
| 1,805
| 47
| 90
| 38.404255
| 0.829345
| 0.023823
| 0
| 0.540541
| 0
| 0
| 0.017055
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 1
| 0.081081
| false
| 0
| 0.081081
| 0
| 0.162162
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f5cc51845475d23e7c062a5fac024933ee413c7
| 145
|
py
|
Python
|
loldib/getratings/models/NA/na_galio/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_galio/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_galio/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_galio_top import *
from .na_galio_jng import *
from .na_galio_mid import *
from .na_galio_bot import *
from .na_galio_sup import *
| 24.166667
| 28
| 0.758621
| 25
| 145
| 4
| 0.36
| 0.3
| 0.55
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 145
| 5
| 29
| 29
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6f5f190a2086a0eb2558ae5e597311cc427260bb
| 147
|
py
|
Python
|
crm/forms.py
|
wkshi/tkd_crm
|
03be2ff84d5d38657f527fe19ec05d8e32779ca3
|
[
"Apache-2.0"
] | null | null | null |
crm/forms.py
|
wkshi/tkd_crm
|
03be2ff84d5d38657f527fe19ec05d8e32779ca3
|
[
"Apache-2.0"
] | null | null | null |
crm/forms.py
|
wkshi/tkd_crm
|
03be2ff84d5d38657f527fe19ec05d8e32779ca3
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
class QueryForm(forms.Form):
query_id = forms.CharField(max_length=32)
password = forms.CharField(max_length=32)
| 24.5
| 45
| 0.761905
| 21
| 147
| 5.190476
| 0.666667
| 0.256881
| 0.311927
| 0.422018
| 0.458716
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0.142857
| 147
| 5
| 46
| 29.4
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
48e0ee529ca991ae65b3086136e8b1fa3897e76c
| 37,355
|
py
|
Python
|
tests/functional-tests/operator_test.py
|
YiannisGkoufas/ibm-spectrum-scale-csi
|
3e3c75103af11b8de54a1e7cd1301403cfb2af10
|
[
"Apache-2.0"
] | 2
|
2022-01-20T12:44:14.000Z
|
2022-01-24T04:15:58.000Z
|
tests/functional-tests/operator_test.py
|
YiannisGkoufas/ibm-spectrum-scale-csi
|
3e3c75103af11b8de54a1e7cd1301403cfb2af10
|
[
"Apache-2.0"
] | 16
|
2021-11-11T07:59:58.000Z
|
2022-01-27T05:37:47.000Z
|
tests/functional-tests/operator_test.py
|
YiannisGkoufas/ibm-spectrum-scale-csi
|
3e3c75103af11b8de54a1e7cd1301403cfb2af10
|
[
"Apache-2.0"
] | 15
|
2021-11-10T12:27:48.000Z
|
2022-01-20T04:37:53.000Z
|
import time
import re
import random
import logging
import pytest
from kubernetes import client
from kubernetes.client.rest import ApiException
import scale_operator as scaleop
from utils.scale_operator_object_function import randomStringDigits, randomString
import utils.fileset_functions as ff
LOGGER = logging.getLogger()
@pytest.fixture(scope='session')
def _values(request):
global kubeconfig_value, clusterconfig_value, namespace_value
kubeconfig_value, clusterconfig_value, namespace_value, runslow_val = scaleop.get_cmd_values(request)
condition = scaleop.check_ns_exists(kubeconfig_value, namespace_value)
operator = scaleop.Scaleoperator(kubeconfig_value, namespace_value)
read_file = scaleop.read_operator_data(clusterconfig_value, namespace_value)
ff.cred_check(read_file)
fileset_exist = ff.fileset_exists(read_file)
operator.create()
operator.check()
scaleop.check_nodes_available(
read_file["pluginNodeSelector"], "pluginNodeSelector")
scaleop.check_nodes_available(
read_file["provisionerNodeSelector"], "provisionerNodeSelector")
scaleop.check_nodes_available(
read_file["attacherNodeSelector"], "attacherNodeSelector")
yield
operator.delete(condition)
if(not(fileset_exist) and ff.fileset_exists(read_file)):
ff.delete_fileset(read_file)
@pytest.mark.regression
def test_get_version(_values):
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
ff.get_scale_version(test)
scaleop.get_kubernetes_version(kubeconfig_value)
scaleop.scale_function.get_operator_image()
def test_operator_deploy(_values):
LOGGER.info("test_operator_deploy")
LOGGER.info("Every input is correct should run without any error")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.error(str(get_logs_api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
operator_object.delete()
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_wrong_cluster_id(_values):
LOGGER.info("test_wrong_cluster_id : cluster ID is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
wrong_id = str(random.randint(0, 999999999999999999))
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["id"] = wrong_id
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result = re.search(
"Cluster ID doesnt match the cluster", get_logs_api_response)
LOGGER.debug(search_result)
assert search_result is not None
LOGGER.info("'Cluster ID doesnt match the cluster' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_wrong_primaryFS(_values):
LOGGER.info("test_wrong_primaryFS : primaryFS is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
wrong_primaryFs = randomStringDigits()
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["primary"]["primaryFs"] = wrong_primaryFs
test["primaryFs"] = wrong_primaryFs
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result = re.search(
"Unable to get filesystem", get_logs_api_response)
LOGGER.debug(search_result)
assert search_result is not None
LOGGER.info("'Unable to get filesystem' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_wrong_guihost(_values):
LOGGER.info("test_wrong_guihost : gui host is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
wrong_guiHost = randomStringDigits()
test["guiHost"] = wrong_guiHost
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["restApi"][0]["guiHost"] = wrong_guiHost
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result1 = re.search(
"connection refused", get_logs_api_response)
LOGGER.debug(search_result1)
search_result2 = re.search("no such host", get_logs_api_response)
LOGGER.debug(search_result2)
assert (search_result1 is not None or search_result2 is not None)
LOGGER.info("'connection refused' or 'no such host' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_wrong_gui_username(_values):
LOGGER.info("test_wrong_gui_username : gui username is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
test["username"] = randomStringDigits()
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
x = re.search("401 Unauthorized", get_logs_api_response)
assert x is not None
LOGGER.info("'401 Unauthorized' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_wrong_gui_password(_values):
LOGGER.info("test_wrong_gui_password : gui password is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
test["password"] = randomStringDigits()
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
operator_object.check()
LOGGER.info("Checkig if failure reason matches")
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_instance = client.CoreV1Api()
count = 0
while count < 24:
try:
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result = re.search("401 Unauthorized", get_logs_api_response)
if search_result is None:
time.sleep(5)
count += 1
else:
LOGGER.debug(search_result)
LOGGER.info("'401 Unauthorized' failure reason matched")
operator_object.delete()
return
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
LOGGER.error(str(get_logs_api_response))
LOGGER.error("Asserting as reason of failure does not match")
assert search_result is not None
def test_wrong_secret_object_name(_values):
LOGGER.info("test_wrong_secret_object_name : secret object name is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
secret_name_wrong = randomString()
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["secrets"] = secret_name_wrong
test["stateful_set_not_created"] = True
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
operator_object.delete()
def test_random_gpfs_primaryFset_name(_values):
LOGGER.info("test_random_gpfs_primaryFset_name : gpfs primary Fset name is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
random_primaryFset = randomStringDigits()
test["primaryFset"] = random_primaryFset
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["primary"]["primaryFset"] = random_primaryFset
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
operator_object.delete()
if(ff.fileset_exists(test)):
ff.delete_fileset(test)
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
operator_object.delete()
if(ff.fileset_exists(test)):
ff.delete_fileset(test)
assert False
if(ff.fileset_exists(test)):
ff.delete_fileset(test)
operator_object.delete()
def test_secureSslMode(_values):
LOGGER.info("test_secureSslMode")
LOGGER.info("secureSslMode is True while cacert is not available")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["secureSslMode"] = True
if "cacert" in cluster.keys():
cluster.pop("cacert")
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result = re.search(
"CA certificate not specified in secure SSL mode for cluster", str(get_logs_api_response))
LOGGER.debug(search_result)
if(search_result is None):
operator_object.delete()
LOGGER.error(str(get_logs_api_response))
LOGGER.error("Reason of failure does not match")
assert search_result is not None
LOGGER.info("'CA certificate not specified in secure SSL mode for cluster' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
if(ff.fileset_exists(test)):
ff.delete_fileset(test)
operator_object.delete()
def test_wrong_gpfs_filesystem_mount_point(_values):
LOGGER.info("test_wrong_gpfs_filesystem_mount_point")
LOGGER.info("gpfs filesystem mount point is wrong")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
wrong_scaleHostpath = randomStringDigits()
test["custom_object_body"]["spec"]["scaleHostpath"] = wrong_scaleHostpath
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
assert False
else:
get_logs_api_instance = client.CoreV1Api()
demonset_pod_name = operator_object.get_driver_ds_pod_name()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
search_result = re.search(
'MountVolume.SetUp failed for volume', str(api_response))
LOGGER.debug(search_result)
assert search_result is not None
LOGGER.info("'MountVolume.SetUp failed for volume' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_unlinked_primaryFset(_values):
LOGGER.info("test_unlinked_primaryFset")
LOGGER.info("unlinked primaryFset expected : object created successfully")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
if(not(ff.fileset_exists(test))):
ff.create_fileset(test)
ff.unlink_fileset(test)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.error(str(get_logs_api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
operator_object.delete()
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_existing_primaryFset(_values):
LOGGER.info("test_existing_primaryFset")
LOGGER.info(
"linked existing primaryFset expected : object created successfully")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
if(not(ff.fileset_exists(test))):
ff.create_fileset(test)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.error(str(get_logs_api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
operator_object.delete()
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_unmounted_primaryFS(_values):
LOGGER.info("test_unmounted_primaryFS")
LOGGER.info(
"primaryFS is unmounted and expected : custom object should give error")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
ff.unmount_fs(test)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully, it is not expected")
operator_object.delete()
ff.mount_fs(test)
assert False
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result = re.search(
'not mounted on GUI node Primary cluster', str(get_logs_api_response))
if search_result is None:
LOGGER.error(str(get_logs_api_response))
LOGGER.debug(search_result)
operator_object.delete()
ff.mount_fs(test)
assert search_result is not None
LOGGER.info("'not mounted on GUI node Primary cluster' failure reason matched")
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
ff.mount_fs(test)
assert False
operator_object.delete()
ff.mount_fs(test)
def test_non_deafult_attacher(_values):
LOGGER.info("test_non_deafult_attacher")
LOGGER.info("attacher image name is changed")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
deployment_attacher_image = "quay.io/k8scsi/csi-attacher:v1.2.1"
test["custom_object_body"]["spec"]["attacher"] = deployment_attacher_image
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_non_deafult_provisioner(_values):
LOGGER.info("test_non_deafult_provisioner")
LOGGER.info("provisioner image name is changed")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
deployment_provisioner_image = "quay.io/k8scsi/csi-provisioner:v1.6.0"
test["custom_object_body"]["spec"]["provisioner"] = deployment_provisioner_image
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_correct_cacert(_values):
LOGGER.info("test_secureSslMode with correct cacert file")
LOGGER.info("correct cacert file is given")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
if not("local_cacert_name" in test):
test["local_cacert_name"] = "test-cacert-configmap"
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["secureSslMode"] = True
if not("cacert" in cluster.keys()):
cluster["cacert"] = "test-cacert-configmap"
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
if test["cacert_path"] == "":
LOGGER.info("skipping the test as cacert file path is not given in test.config")
pytest.skip("path of cacert file is not given")
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.info(str(get_logs_api_response))
operator_object.delete()
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_cacert_with_secureSslMode_false(_values):
LOGGER.info("test_cacert_with_secureSslMode_false")
LOGGER.info("secureSslMode is false with correct cacert file")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
if not("local_cacert_name" in test):
test["local_cacert_name"] = "test-cacert-configmap"
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["secureSslMode"] = False
if not("cacert" in cluster.keys()):
cluster["cacert"] = "test-cacert-configmap"
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
if test["cacert_path"] == "":
LOGGER.info("skipping the test as cacert file path is not given in test.config")
pytest.skip("path of cacert file is not given")
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.info(str(get_logs_api_response))
operator_object.delete()
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_wrong_cacert(_values):
LOGGER.info("secureSslMode true with wrong cacert file")
LOGGER.info("test_wrong_cacert")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
if not("local_cacert_name" in test):
test["local_cacert_name"] = "test-cacert-configmap"
for cluster in test["custom_object_body"]["spec"]["clusters"]:
if "primary" in cluster.keys():
cluster["secureSslMode"] = True
if not("cacert" in cluster.keys()):
cluster["cacert"] = "test-cacert-configmap"
test["make_cacert_wrong"] = True
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
if test["cacert_path"] == "":
LOGGER.info("skipping the test as cacert file path is not given in test.config")
pytest.skip("path of cacert file is not given")
operator_object.create()
if operator_object.check() is True:
LOGGER.error(
"Operator custom object is deployed successfully not expected")
operator_object.delete()
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod logs")
get_logs_api_instance = client.CoreV1Api()
count = 0
while count < 24:
try:
get_logs_api_response = get_logs_api_instance.read_namespaced_pod_log(
name=demonset_pod_name, namespace=namespace_value, container="ibm-spectrum-scale-csi")
LOGGER.debug(str(get_logs_api_response))
search_result = re.search(
"Error in plugin initialization: Parsing CA cert", get_logs_api_response)
if search_result is None:
time.sleep(5)
else:
LOGGER.debug(search_result)
break
if count > 23:
operator_object.delete()
assert search_result is not None
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_nodeMapping(_values):
LOGGER.info("test_nodeMapping")
LOGGER.info("nodeMapping is added to the cr file")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_attacherNodeSelector(_values):
LOGGER.info("test_attacherNodeSelector")
LOGGER.info("attacherNodeSelector is added to the cr file")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
desired_daemonset_node, labelled_nodes = operator_object.get_scaleplugin_labelled_nodes(
test["attacherNodeSelector"])
if desired_daemonset_node == labelled_nodes:
LOGGER.info("labelled nodes are equal to desired daemonset nodes")
else:
LOGGER.error(
"labelled nodes are not equal to desired daemonset nodes")
assert False
else:
get_logs_api_instance = client.CoreV1Api()
try:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_provisionerNodeSelector(_values):
LOGGER.info("test_provisionerNodeSelector")
LOGGER.info("provisionerNodeSelector is added to the cr file")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
desired_daemonset_node, labelled_nodes = operator_object.get_scaleplugin_labelled_nodes(
test["provisionerNodeSelector"])
if desired_daemonset_node == labelled_nodes:
LOGGER.info("labelled nodes are equal to desired daemonset nodes")
else:
LOGGER.error(
"labelled nodes are not equal to desired daemonset nodes")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
def test_pluginNodeSelector(_values):
LOGGER.info("test_pluginNodeSelector")
LOGGER.info("pluginNodeSelector is added to the cr file")
test = scaleop.read_operator_data(clusterconfig_value, namespace_value)
operator_object = scaleop.Scaleoperatorobject(test, kubeconfig_value)
operator_object.create()
if operator_object.check() is True:
LOGGER.info("Operator custom object is deployed successfully")
desired_daemonset_node, labelled_nodes = operator_object.get_scaleplugin_labelled_nodes(
test["pluginNodeSelector"])
if desired_daemonset_node == labelled_nodes:
LOGGER.info("labelled nodes are equal to desired daemonset nodes")
else:
LOGGER.error(
"labelled nodes are not equal to desired daemonset nodes")
assert False
else:
demonset_pod_name = operator_object.get_driver_ds_pod_name()
get_logs_api_instance = client.CoreV1Api()
try:
LOGGER.info(f"Checking for failure reason match in {demonset_pod_name} pod events")
field = "involvedObject.name="+demonset_pod_name
api_response = get_logs_api_instance.list_namespaced_event(
namespace=namespace_value, pretty="True", field_selector=field)
LOGGER.debug(str(api_response))
LOGGER.error(
"operator custom object should be deployed but it is not deployed hence asserting")
assert False
except ApiException as e:
LOGGER.error(
f"Exception when calling CoreV1Api->read_namespaced_pod_log: {e}")
assert False
operator_object.delete()
| 46.117284
| 112
| 0.668906
| 4,322
| 37,355
| 5.508329
| 0.057381
| 0.073508
| 0.034444
| 0.031755
| 0.870122
| 0.834628
| 0.813584
| 0.803839
| 0.795942
| 0.784391
| 0
| 0.003551
| 0.253755
| 37,355
| 809
| 113
| 46.174289
| 0.850481
| 0
| 0
| 0.785912
| 0
| 0
| 0.249685
| 0.048213
| 0
| 0
| 0
| 0
| 0.09116
| 1
| 0.033149
| false
| 0.004144
| 0.013812
| 0
| 0.048343
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b206f9804f3227e21dbbbe774a8c90d8ce5d7a8
| 451
|
py
|
Python
|
cupyx/scipy/fftpack/__init__.py
|
svlandeg/cupy
|
484e007d5bf58a0445af2f6e7aa3fdfe0fcc2363
|
[
"MIT"
] | 2
|
2020-02-28T09:27:58.000Z
|
2020-10-12T07:10:24.000Z
|
cupyx/scipy/fftpack/__init__.py
|
svlandeg/cupy
|
484e007d5bf58a0445af2f6e7aa3fdfe0fcc2363
|
[
"MIT"
] | 1
|
2020-06-03T10:59:17.000Z
|
2020-06-03T11:38:20.000Z
|
cupyx/scipy/fftpack/__init__.py
|
svlandeg/cupy
|
484e007d5bf58a0445af2f6e7aa3fdfe0fcc2363
|
[
"MIT"
] | 1
|
2022-03-24T13:19:55.000Z
|
2022-03-24T13:19:55.000Z
|
from cupyx.scipy.fftpack.fft import fft # NOQA
from cupyx.scipy.fftpack.fft import fft2 # NOQA
from cupyx.scipy.fftpack.fft import fftn # NOQA
from cupyx.scipy.fftpack.fft import ifft # NOQA
from cupyx.scipy.fftpack.fft import ifft2 # NOQA
from cupyx.scipy.fftpack.fft import ifftn # NOQA
from cupyx.scipy.fftpack.fft import irfft # NOQA
from cupyx.scipy.fftpack.fft import rfft # NOQA
from cupyx.scipy.fftpack.fft import get_fft_plan # NOQA
| 45.1
| 56
| 0.780488
| 74
| 451
| 4.72973
| 0.216216
| 0.231429
| 0.36
| 0.54
| 0.862857
| 0.862857
| 0.777143
| 0
| 0
| 0
| 0
| 0.005155
| 0.13969
| 451
| 9
| 57
| 50.111111
| 0.896907
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
d28788d0d9b0ba279799f83ab9cf82bdd6a4eb02
| 4,546
|
py
|
Python
|
tests/kyu_5_tests/test_all_that_is_open_must_be_closed.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
tests/kyu_5_tests/test_all_that_is_open_must_be_closed.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
tests/kyu_5_tests/test_all_that_is_open_must_be_closed.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
import unittest
from katas.kyu_5.all_that_is_open_must_be_closed import is_balanced
class IsBalancedTestCase(unittest.TestCase):
    """Drive is_balanced through balanced and unbalanced delimiter samples."""

    def _check(self, expected, text, delimiters):
        # Funnel every case through one spot while keeping the exact
        # assertTrue/assertFalse (truthiness) semantics of the originals.
        if expected:
            self.assertTrue(is_balanced(text, delimiters))
        else:
            self.assertFalse(is_balanced(text, delimiters))

    def test_true_1(self):
        self._check(True, '(Sensei says yes!)', '()')

    def test_true_2(self):
        self._check(True, '(Sensei [says] yes!)', '()[]')

    def test_true_3(self):
        self._check(True, 'Sensei says -yes-!', '--')

    def test_true_4(self):
        self._check(True, 'Hello Mother can you hear me?', '()')

    def test_true_5(self):
        self._check(True, '(Hello Mother can you hear me?)', '()')

    def test_true_6(self):
        self._check(True, '(Hello Mother can you hear me?', '')

    def test_true_7(self):
        self._check(
            True,
            '(Hello Mother can you hear me?)[Monkeys, in my pockets!!]',
            '()[]')

    def test_true_8(self):
        self._check(
            True,
            '(Hello Mother can you hear me?)[Monkeys, in my pockets!!](Gosh'
            '!!)', '()[]')

    def test_true_9(self):
        self._check(True, '((Hello))', '()')

    def test_true_10(self):
        self._check(True, '(((Hello)))', '()')

    def test_true_11(self):
        self._check(True, '((()Hello()))', '()')

    def test_true_12(self):
        self._check(True, '([{-Hello!-}])', '()[]{}')

    def test_true_13(self):
        self._check(True, '([{([{Hello}])}])', '()[]{}')

    def test_true_14(self):
        self._check(True, '-abcd-e@fghi@', '--@@')

    def test_true_15(self):
        self._check(True, '-Hello Mother can you hear me?-', '--')

    def test_true_16(self):
        self._check(True, '-a@b@cd@e@fghi-', '--@@')

    def test_false_1(self):
        self._check(False, '(Sensei says no!', '()')

    def test_false_2(self):
        self._check(False, '(Sensei [says) no!]', '()[]')

    def test_false_3(self):
        self._check(False, 'Sensei -says no!', '--')

    def test_false_4(self):
        self._check(False, '(Hello Mother can you hear me?', '()')

    def test_false_5(self):
        self._check(False, '(Hello Mother can you hear me?))', '()')

    def test_false_6(self):
        self._check(False, ')Hello Mother can you hear me?', '()')

    def test_false_7(self):
        self._check(
            False,
            'Hello Mother can you hear me?)[Monkeys, in my pockets!!]',
            '()[]')

    def test_false_8(self):
        self._check(
            False,
            '(Hello Mother can you hear me?[Monkeys, in my pockets!!]',
            '()[]')

    def test_false_9(self):
        self._check(
            False,
            '(Hello Mother can you hear me?)Monkeys, in my pockets!!]',
            '()[]')

    def test_false_10(self):
        self._check(
            False,
            '(Hello Mother can you hear me?)[Monkeys, in my pockets!!',
            '()[]')

    def test_false_11(self):
        self._check(False, '((()Hello())', '()')

    def test_false_12(self):
        self._check(False, '(()Hello()))', '()')

    def test_false_13(self):
        self._check(False, '([{-Hello!-})]', '()[]{}')

    def test_false_14(self):
        self._check(False, '-Hello Mother can you hear me?', '--')

    def test_false_15(self):
        self._check(False, 'Hello Mother can you hear me?-', '--')

    def test_false_16(self):
        self._check(False, 'abcd-e@fghi@', '--@@')

    def test_false_17(self):
        self._check(False, '-abcde@fghi@', '--@@')

    def test_false_18(self):
        self._check(False, '-abcd-efghi@', '--@@')

    def test_false_19(self):
        self._check(False, '-abcd-e@fghi', '--@@')

    def test_false_20(self):
        self._check(False, '-ab@cd@e@fghi-', '--@@')

    def test_false_21(self):
        self._check(False, '-a@bcd@e@fghi-', '--@@')

    def test_false_22(self):
        self._check(False, '-a@b@cde@fghi-', '--@@')

    def test_false_23(self):
        self._check(False, '-a@b@cd@efghi-', '--@@')

    def test_false_24(self):
        self._check(False, 'a@b@cd@e@fghi-', '--@@')

    def test_false_25(self):
        self._check(False, '-a@b@cd@e@fghi', '--@@')
| 30.510067
| 77
| 0.578751
| 574
| 4,546
| 4.355401
| 0.12892
| 0.168
| 0.12
| 0.21
| 0.8864
| 0.842
| 0.8092
| 0.7864
| 0.7732
| 0.63
| 0
| 0.018534
| 0.228553
| 4,546
| 148
| 78
| 30.716216
| 0.694326
| 0
| 0
| 0.180952
| 0
| 0
| 0.24637
| 0
| 0
| 0
| 0
| 0
| 0.390476
| 1
| 0.390476
| false
| 0
| 0.019048
| 0
| 0.419048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d28ee875b765ac6e3503203cfde3010551766336
| 35
|
py
|
Python
|
miniconda3-lnx/pkgs/pip-20.0.2-py37_3/info/test/run_test.py
|
Thibaut-Kovaltchouk/MultiPyzo
|
a15ecf77e31ebeb195e70385f5ac132f6ab4504d
|
[
"CC0-1.0"
] | 1
|
2021-11-08T01:25:40.000Z
|
2021-11-08T01:25:40.000Z
|
miniconda3-lnx/pkgs/pip-20.0.2-py37_3/info/test/run_test.py
|
Thibaut-Kovaltchouk/MultiPyzo
|
a15ecf77e31ebeb195e70385f5ac132f6ab4504d
|
[
"CC0-1.0"
] | 19
|
2021-03-10T21:30:56.000Z
|
2022-02-27T06:45:03.000Z
|
miniconda3-lnx/pkgs/pip-20.0.2-py37_3/info/test/run_test.py
|
Thibaut-Kovaltchouk/MultiPyzo
|
a15ecf77e31ebeb195e70385f5ac132f6ab4504d
|
[
"CC0-1.0"
] | 2
|
2021-11-08T01:25:30.000Z
|
2022-01-13T07:53:38.000Z
|
# Conda package smoke test: announce which import is being attempted, then
# attempt it. An ImportError here fails the package's post-build test.
print("import: 'pip'")
import pip
| 8.75
| 22
| 0.657143
| 5
| 35
| 4.6
| 0.6
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 35
| 3
| 23
| 11.666667
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0.382353
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
d2e20ef2da9c784ca993bb4ca0188be2201966e9
| 38
|
py
|
Python
|
removeLater.py
|
Luochenghuang/ToxNet
|
e02788c73abf65134385974d8b09fc08f63badbd
|
[
"MIT"
] | 4
|
2018-06-29T16:53:30.000Z
|
2018-07-25T17:58:05.000Z
|
removeLater.py
|
Luochenghuang/ToxNet
|
e02788c73abf65134385974d8b09fc08f63badbd
|
[
"MIT"
] | 1
|
2018-05-24T23:19:54.000Z
|
2018-06-06T22:57:21.000Z
|
removeLater.py
|
Luochenghuang/ToxNet
|
e02788c73abf65134385974d8b09fc08f63badbd
|
[
"MIT"
] | 2
|
2018-12-29T18:59:49.000Z
|
2019-02-22T19:05:56.000Z
|
import python
def remove():
    """Placeholder stub; always returns the constant 1."""
    result = 1
    return result
| 9.5
| 13
| 0.736842
| 6
| 38
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0.184211
| 38
| 4
| 14
| 9.5
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
d2e9887239c92db46be2f87b2c5579c16f7840a8
| 1,627
|
py
|
Python
|
fused_lasso/gen_data.py
|
vonguyenleduy/parametric_si_generalized_lasso
|
3ef9e2d47036b812b9737d4d5b887000dc24ce03
|
[
"BSD-3-Clause"
] | null | null | null |
fused_lasso/gen_data.py
|
vonguyenleduy/parametric_si_generalized_lasso
|
3ef9e2d47036b812b9737d4d5b887000dc24ce03
|
[
"BSD-3-Clause"
] | null | null | null |
fused_lasso/gen_data.py
|
vonguyenleduy/parametric_si_generalized_lasso
|
3ef9e2d47036b812b9737d4d5b887000dc24ce03
|
[
"BSD-3-Clause"
] | 1
|
2021-06-04T19:22:37.000Z
|
2021-06-04T19:22:37.000Z
|
import numpy as np
from scipy.stats import skewnorm
def generate(n, p, beta_vec):
    """Sample a linear-regression dataset.

    Each of the ``n`` rows receives ``p`` i.i.d. standard-normal features;
    the response is the dot product with ``beta_vec`` plus one
    standard-normal noise draw per row.

    Returns the pair ``(X, y)`` as numpy arrays.
    """
    rows = []
    responses = []
    for _ in range(n):
        # Draw features one at a time so the RNG call sequence stays
        # identical to per-element sampling.
        row = [np.random.normal(0, 1) for _ in range(p)]
        signal = 0
        for feature, coef in zip(row, beta_vec):
            signal = signal + feature * coef
        noise = np.random.normal(0, 1)
        rows.append(row)
        responses.append(signal + noise)
    return np.array(rows), np.array(responses)
def generate_test(n, p, beta_vec):
    """Sample a linear-regression dataset plus its noiseless responses.

    Same sampling scheme as ``generate``, but additionally records
    ``true_y``, the response before the noise draw is added.

    Returns ``(X, y, true_y)`` as numpy arrays.
    """
    rows = []
    responses = []
    clean_responses = []
    for _ in range(n):
        # Per-element draws keep the RNG call order unchanged.
        row = [np.random.normal(0, 1) for _ in range(p)]
        signal = 0
        for feature, coef in zip(row, beta_vec):
            signal = signal + feature * coef
        clean_responses.append(signal)
        noise = np.random.normal(0, 1)
        rows.append(row)
        responses.append(signal + noise)
    return np.array(rows), np.array(responses), np.array(clean_responses)
def generate_non_normal(n, p, beta_vec):
    """Sample a regression dataset intended for non-normal noise studies.

    As committed, the active noise is still standard normal; the original
    author left alternative distributions commented out below. Returns
    ``(X, y, true_y)`` as numpy arrays, where ``true_y`` is noiseless.
    """
    rows = []
    responses = []
    clean_responses = []
    for _ in range(n):
        # Per-element draws keep the RNG call order unchanged.
        row = [np.random.normal(0, 1) for _ in range(p)]
        signal = 0
        for feature, coef in zip(row, beta_vec):
            signal = signal + feature * coef
        clean_responses.append(signal)
        noise = np.random.normal(0, 1)
        # Alternative noise distributions preserved from the original:
        # noise = np.random.laplace(0, 1)
        # noise = skewnorm.rvs(a=10, loc=0, scale=1)
        # noise = np.random.standard_t(20)
        rows.append(row)
        responses.append(signal + noise)
    return np.array(rows), np.array(responses), np.array(clean_responses)
| 20.594937
| 53
| 0.448064
| 244
| 1,627
| 2.905738
| 0.17623
| 0.070522
| 0.118477
| 0.126939
| 0.757405
| 0.757405
| 0.7433
| 0.7433
| 0.7433
| 0.7433
| 0
| 0.02416
| 0.414874
| 1,627
| 79
| 54
| 20.594937
| 0.720588
| 0.065765
| 0
| 0.892857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053571
| false
| 0
| 0.035714
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
828f19e5a090f68cda014cff42ee74a0a8354d64
| 200
|
py
|
Python
|
annotationtools/writers/__init__.py
|
tomaszfurmanek/CRIMAC-annotationtools
|
c9f6231442a949e015e50b707aefec0c91d8d11a
|
[
"MIT"
] | null | null | null |
annotationtools/writers/__init__.py
|
tomaszfurmanek/CRIMAC-annotationtools
|
c9f6231442a949e015e50b707aefec0c91d8d11a
|
[
"MIT"
] | 2
|
2021-03-11T10:45:25.000Z
|
2021-06-03T08:33:00.000Z
|
annotationtools/writers/__init__.py
|
tomaszfurmanek/CRIMAC-annotationtools
|
c9f6231442a949e015e50b707aefec0c91d8d11a
|
[
"MIT"
] | 1
|
2021-09-15T13:06:48.000Z
|
2021-09-15T13:06:48.000Z
|
"""
Include code to unpack manufacturer-specific data files into an interoperable netCDF format.
"""
from .annotation_to_work import annotation_to_work
from .annotation_to_nc import annotation_to_nc
| 28.571429
| 92
| 0.835
| 29
| 200
| 5.482759
| 0.62069
| 0.301887
| 0.201258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115
| 200
| 6
| 93
| 33.333333
| 0.898305
| 0.46
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
82a45883d44f810674eaca5ea88bc1b0c27e045b
| 7,146
|
py
|
Python
|
core/models/model_utils/resnet_blocks.py
|
swipswaps/retinal_oct
|
a99f93d88833fc328b9b7f6aaabe1310632c644b
|
[
"MIT"
] | 15
|
2021-01-29T17:05:38.000Z
|
2022-03-16T17:47:42.000Z
|
core/models/model_utils/resnet_blocks.py
|
solomonkimunyu/retinal_oct
|
a99f93d88833fc328b9b7f6aaabe1310632c644b
|
[
"MIT"
] | null | null | null |
core/models/model_utils/resnet_blocks.py
|
solomonkimunyu/retinal_oct
|
a99f93d88833fc328b9b7f6aaabe1310632c644b
|
[
"MIT"
] | 14
|
2021-03-03T03:16:31.000Z
|
2022-03-23T19:23:42.000Z
|
from core.models.model_utils.layers import *
class ResidualBlock(tf.keras.layers.Layer):
    """Two-conv residual block: out = relu(conv_path(x) + shortcut(x)).

    The main path is conv3x3(strides) -> BN+ReLU -> conv3x3(1) -> BN
    (gamma init to zero). When ``use_projection`` is set, the shortcut is a
    1x1 projection (optionally the ResNet-D avg-pool variant when
    ``sk_ratio > 0``) followed by BN without ReLU. An SE layer is applied
    to the main path when ``se_ratio > 0``. ``dropblock_*`` arguments are
    accepted but unused here (deleted below).
    """

    def __init__(self,
                 filters,
                 strides,
                 use_projection=False,
                 data_format='channels_last',
                 dropblock_keep_prob=None,
                 dropblock_size=None,
                 sk_ratio=0.0,
                 se_ratio=0.0,
                 **kwargs):
        super(ResidualBlock, self).__init__(**kwargs)
        # DropBlock is not used in this basic block; the parameters exist
        # only for signature parity with BottleneckBlock.
        del dropblock_keep_prob
        del dropblock_size
        self.conv2d_bn_layers = []
        self.shortcut_layers = []
        if use_projection:
            if sk_ratio > 0:  # Use ResNet-D (https://arxiv.org/abs/1812.01187)
                # ResNet-D: downsample with avg-pool, then 1x1 conv at stride 1.
                if strides > 1:
                    self.shortcut_layers.append(FixedPadding(2, data_format))
                self.shortcut_layers.append(
                    tf.keras.layers.AveragePooling2D(
                        pool_size=2,
                        strides=strides,
                        padding='SAME' if strides == 1 else 'VALID',
                        data_format=data_format))
                self.shortcut_layers.append(
                    Conv2dFixedPadding(
                        filters=filters,
                        kernel_size=1,
                        strides=1,
                        data_format=data_format))
            else:
                # Plain strided 1x1 projection shortcut.
                self.shortcut_layers.append(
                    Conv2dFixedPadding(
                        filters=filters,
                        kernel_size=1,
                        strides=strides,
                        data_format=data_format))
            self.shortcut_layers.append(
                BatchNormRelu(relu=False, data_format=data_format))
        # Main path: 3x3 (strided) -> BN+ReLU -> 3x3 -> BN (no ReLU,
        # zero-initialized gamma so the block starts as identity-ish).
        self.conv2d_bn_layers.append(
            Conv2dFixedPadding(
                filters=filters,
                kernel_size=3,
                strides=strides,
                data_format=data_format))
        self.conv2d_bn_layers.append(BatchNormRelu(data_format=data_format))
        self.conv2d_bn_layers.append(
            Conv2dFixedPadding(
                filters=filters, kernel_size=3, strides=1, data_format=data_format))
        self.conv2d_bn_layers.append(
            BatchNormRelu(relu=False, init_zero=True, data_format=data_format))
        if se_ratio > 0:
            # Squeeze-and-excitation, only instantiated when requested.
            self.se_layer = SE_Layer(filters, se_ratio, data_format=data_format)
        self.se_ratio = se_ratio

    def call(self, inputs, training):
        """Apply the block; returns relu(main_path(inputs) + shortcut)."""
        shortcut = inputs
        for layer in self.shortcut_layers:
            # Projection shortcut in first layer to match filters and strides
            shortcut = layer(shortcut, training=training)
        for layer in self.conv2d_bn_layers:
            inputs = layer(inputs, training=training)
        if self.se_ratio > 0:
            inputs = self.se_layer(inputs, training=training)
        return tf.nn.relu(inputs + shortcut)
class BottleneckBlock(tf.keras.layers.Layer):
    """Bottleneck residual block: 1x1 -> 3x3 (or SK conv) -> 1x1 (4x filters).

    Each conv stage is followed by BN(+ReLU) and a DropBlock; the shortcut
    gets its own DropBlock. When ``use_projection`` is set, the shortcut
    projects to ``4 * filters`` channels (ResNet-D avg-pool variant when
    ``sk_ratio > 0``). ``sk_ratio > 0`` swaps the middle 3x3 conv for an
    SK_Conv2D; ``se_ratio > 0`` appends an SE layer at the end of the main
    path.
    """

    def __init__(self,
                 filters,
                 strides,
                 use_projection=False,
                 data_format='channels_last',
                 dropblock_keep_prob=None,
                 dropblock_size=None,
                 sk_ratio=0.0,
                 se_ratio=0.0,
                 **kwargs):
        super(BottleneckBlock, self).__init__(**kwargs)
        self.projection_layers = []
        if use_projection:
            # Bottleneck output width is 4x the internal filter count.
            filters_out = 4 * filters
            if sk_ratio > 0:  # Use ResNet-D (https://arxiv.org/abs/1812.01187)
                # ResNet-D: downsample with avg-pool, then 1x1 conv at stride 1.
                if strides > 1:
                    self.projection_layers.append(FixedPadding(2, data_format))
                self.projection_layers.append(
                    tf.keras.layers.AveragePooling2D(
                        pool_size=2,
                        strides=strides,
                        padding='SAME' if strides == 1 else 'VALID',
                        data_format=data_format))
                self.projection_layers.append(
                    Conv2dFixedPadding(
                        filters=filters_out,
                        kernel_size=1,
                        strides=1,
                        data_format=data_format))
            else:
                # Plain strided 1x1 projection shortcut.
                self.projection_layers.append(
                    Conv2dFixedPadding(
                        filters=filters_out,
                        kernel_size=1,
                        strides=strides,
                        data_format=data_format))
            self.projection_layers.append(
                BatchNormRelu(relu=False, data_format=data_format))
        # DropBlock applied to the (possibly projected) shortcut branch.
        self.shortcut_dropblock = DropBlock(
            data_format=data_format,
            keep_prob=dropblock_keep_prob,
            dropblock_size=dropblock_size)
        self.conv_relu_dropblock_layers = []
        # Stage 1: 1x1 reduction conv -> BN+ReLU -> DropBlock.
        self.conv_relu_dropblock_layers.append(
            Conv2dFixedPadding(
                filters=filters, kernel_size=1, strides=1, data_format=data_format))
        self.conv_relu_dropblock_layers.append(
            BatchNormRelu(data_format=data_format))
        self.conv_relu_dropblock_layers.append(
            DropBlock(
                data_format=data_format,
                keep_prob=dropblock_keep_prob,
                dropblock_size=dropblock_size))
        # Stage 2: selective-kernel conv, or plain 3x3 conv + BN+ReLU.
        if sk_ratio > 0:
            self.conv_relu_dropblock_layers.append(
                SK_Conv2D(filters, strides, sk_ratio, data_format=data_format))
        else:
            self.conv_relu_dropblock_layers.append(
                Conv2dFixedPadding(
                    filters=filters,
                    kernel_size=3,
                    strides=strides,
                    data_format=data_format))
            self.conv_relu_dropblock_layers.append(
                BatchNormRelu(data_format=data_format))
        self.conv_relu_dropblock_layers.append(
            DropBlock(
                data_format=data_format,
                keep_prob=dropblock_keep_prob,
                dropblock_size=dropblock_size))
        # Stage 3: 1x1 expansion conv (4x filters) -> BN (no ReLU,
        # zero-initialized gamma) -> DropBlock.
        self.conv_relu_dropblock_layers.append(
            Conv2dFixedPadding(
                filters=4 * filters,
                kernel_size=1,
                strides=1,
                data_format=data_format))
        self.conv_relu_dropblock_layers.append(
            BatchNormRelu(relu=False, init_zero=True, data_format=data_format))
        self.conv_relu_dropblock_layers.append(
            DropBlock(
                data_format=data_format,
                keep_prob=dropblock_keep_prob,
                dropblock_size=dropblock_size))
        if se_ratio > 0:
            # Optional squeeze-and-excitation at the end of the main path.
            self.conv_relu_dropblock_layers.append(
                SE_Layer(filters, se_ratio, data_format=data_format))

    def call(self, inputs, training):
        """Apply the block; returns relu(main_path(inputs) + shortcut)."""
        shortcut = inputs
        for layer in self.projection_layers:
            shortcut = layer(shortcut, training=training)
        shortcut = self.shortcut_dropblock(shortcut, training=training)
        for layer in self.conv_relu_dropblock_layers:
            inputs = layer(inputs, training=training)
        return tf.nn.relu(inputs + shortcut)
| 39.04918
| 84
| 0.552897
| 696
| 7,146
| 5.376437
| 0.114943
| 0.144308
| 0.093533
| 0.133618
| 0.85489
| 0.830305
| 0.803314
| 0.761892
| 0.727953
| 0.70604
| 0
| 0.016555
| 0.374475
| 7,146
| 182
| 85
| 39.263736
| 0.820582
| 0.024769
| 0
| 0.807453
| 0
| 0
| 0.006321
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024845
| false
| 0
| 0.006211
| 0
| 0.055901
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82c969a88d3f2f77bde04581fa43d8941b47d236
| 10,632
|
py
|
Python
|
testing/metaopt/test_random.py
|
Neuraxio/Neuraxle
|
0615701b781c948e4ec38fa61c6b3a5d8d72c147
|
[
"Apache-2.0"
] | 519
|
2019-03-29T19:17:41.000Z
|
2022-03-31T12:45:42.000Z
|
testing/metaopt/test_random.py
|
Neuraxio/Neuraxle
|
0615701b781c948e4ec38fa61c6b3a5d8d72c147
|
[
"Apache-2.0"
] | 401
|
2019-06-09T19:06:47.000Z
|
2022-03-31T14:00:12.000Z
|
testing/metaopt/test_random.py
|
Neuraxio/Neuraxle
|
0615701b781c948e4ec38fa61c6b3a5d8d72c147
|
[
"Apache-2.0"
] | 55
|
2019-06-09T19:24:31.000Z
|
2022-01-22T00:17:42.000Z
|
"""
Tests for Metaopt
=============================================
..
Copyright 2019, Neuraxio Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pytest
import math
import numpy as np
from neuraxle.metaopt.random import WalkForwardTimeSeriesCrossValidationWrapper, AnchoredWalkForwardTimeSeriesCrossValidationWrapper
# Parameter sets for the classic (sliding-window) walk-forward splitter.
# Each tuple is:
# (training_size, validation_size, padding_between_training_and_validation, drop_remainder)
classic_walforward_parameters = {
    # Case 1:
    (9, 3, 1, False),
    # Case 2: same windows as case 1, but dropping the remainder fold.
    (9, 3, 1, True),
    # Case 3:
    (9, 3, 2, False),
    # Case 4:
    (8, 3, 2, False),
    # Case 5:
    (5, 2, 3, True),
    # Case 6 (Default Parameters): validation size None falls back to training size.
    (2, None, 0, False),
}
@pytest.mark.parametrize("training_window_size, validation_window_size, "
                         "padding_between_training_and_validation, drop_remainder",
                         classic_walforward_parameters)
def test_classic_walkforward_crossvalidation_split(training_window_size: int, validation_window_size: int,
                                                   padding_between_training_and_validation: int, drop_remainder: bool):
    """Check fold count and per-fold shapes of the classic walk-forward split.

    Expected fold count and remainder size are recomputed here from the
    window sizes, then compared against what step.split() returns.
    """
    # Arrange
    # set random seed
    np.random.seed(10)
    # Defines the data shape.
    batch_size = 7
    time_series_size = 20
    features_size = 2
    # None means the validation window defaults to the training window size.
    validation_window_size_temp = validation_window_size
    if validation_window_size is None:
        validation_window_size_temp = training_window_size
    if drop_remainder:
        # We have one less number of fold if we drop remainder.
        number_of_fold = math.floor(
            (time_series_size - training_window_size - padding_between_training_and_validation) /
            validation_window_size_temp
        )
    else:
        number_of_fold = math.ceil(
            (time_series_size - training_window_size - padding_between_training_and_validation) /
            validation_window_size_temp
        )
    # Calculate the size of the remainder (the last, possibly shorter fold).
    remainder_size = (time_series_size - training_window_size - padding_between_training_and_validation) % \
        validation_window_size_temp
    if remainder_size == 0:
        # Remainder of 0 means the data perfectly fits in the number of fold
        # and remainder should be window_size instead.
        remainder_size = validation_window_size_temp
    # Initialize the inputs.
    data_inputs = np.random.randint(low=0, high=1, size=(batch_size, time_series_size, features_size)).astype(float)
    expected_outputs = np.random.randint(low=0, high=1, size=(batch_size, time_series_size, features_size)) \
        .astype(float)
    # Initialize the class to test.
    step = WalkForwardTimeSeriesCrossValidationWrapper(
        validation_window_size=validation_window_size,
        training_window_size=training_window_size,
        padding_between_training_and_validation=padding_between_training_and_validation,
        drop_remainder=drop_remainder
    )
    # Act
    train_data_inputs, train_expected_outputs, validation_data_inputs, validation_expected_outputs = \
        step.split(data_inputs, expected_outputs)
    # Assert
    assert len(train_data_inputs) == number_of_fold
    assert len(train_expected_outputs) == number_of_fold
    assert len(validation_data_inputs) == number_of_fold
    assert len(validation_expected_outputs) == number_of_fold
    # First two folds: training windows are fixed-size in the classic scheme.
    assert train_data_inputs[0].shape == (batch_size, training_window_size, features_size)
    assert train_expected_outputs[0].shape == (batch_size, training_window_size, features_size)
    assert validation_data_inputs[0].shape == (batch_size, validation_window_size_temp, features_size)
    assert validation_expected_outputs[0].shape == (batch_size, validation_window_size_temp, features_size)
    assert train_data_inputs[1].shape == (batch_size, training_window_size, features_size)
    assert train_expected_outputs[1].shape == (batch_size, training_window_size, features_size)
    assert validation_data_inputs[1].shape == (batch_size, validation_window_size_temp, features_size)
    assert validation_expected_outputs[1].shape == (batch_size, validation_window_size_temp, features_size)
    if drop_remainder:
        # Last fold is full-size when the remainder fold was dropped.
        assert train_data_inputs[-1].shape == (
            batch_size, training_window_size, features_size)
        assert train_expected_outputs[-1].shape == (
            batch_size, training_window_size, features_size)
        assert validation_data_inputs[-1].shape == (batch_size, validation_window_size_temp, features_size)
        assert validation_expected_outputs[-1].shape == (batch_size, validation_window_size_temp, features_size)
    else:
        # Last fold's validation window shrinks to the computed remainder.
        assert train_data_inputs[-1].shape == (
            batch_size, training_window_size, features_size)
        assert train_expected_outputs[-1].shape == (
            batch_size, training_window_size, features_size)
        assert validation_data_inputs[-1].shape == (batch_size, remainder_size, features_size)
        assert validation_expected_outputs[-1].shape == (batch_size, remainder_size, features_size)
# Parameter sets for the anchored (expanding-window) walk-forward splitter.
# Each tuple is:
# (minimum_training_size, validation_window_size, padding_between_training_and_validation, drop_remainder)
anchored_walforward_parameters = {
    # Case 1:
    (9, 3, 1, False),
    # Case 2: same windows as case 1, but dropping the remainder fold.
    (9, 3, 1, True),
    # Case 3:
    (9, 3, 2, False),
    # Case 4:
    (8, 3, 2, False),
    # Case 5:
    (5, 2, 3, True),
    # Case 6 (Default Parameters): validation size None falls back to minimum training size.
    (2, None, 0, False),
}
@pytest.mark.parametrize(
    "minimum_training_size, validation_window_size, padding_between_training_and_validation, drop_remainder",
    anchored_walforward_parameters)
def test_anchored_walkforward_crossvalidation_split(minimum_training_size: int, validation_window_size: int,
                                                    padding_between_training_and_validation: int, drop_remainder: bool):
    """Check fold count and per-fold shapes of the anchored walk-forward split.

    Unlike the classic scheme, the training window is anchored at the start
    and grows by one validation window per fold.
    """
    # Arrange
    # set random seed
    np.random.seed(10)
    # Defines the data shape.
    batch_size = 7
    time_series_size = 20
    features_size = 2
    # None means the validation window defaults to the minimum training size.
    validation_window_size_temp = validation_window_size
    if validation_window_size is None:
        validation_window_size_temp = minimum_training_size
    if drop_remainder:
        # We have one less number of fold if we drop remainder.
        number_of_fold = math.floor(
            (time_series_size - minimum_training_size - padding_between_training_and_validation) /
            validation_window_size_temp
        )
    else:
        number_of_fold = math.ceil(
            (time_series_size - minimum_training_size - padding_between_training_and_validation) /
            validation_window_size_temp
        )
    # Calculate the size of the remainder (the last, possibly shorter fold).
    remainder_size = (time_series_size - minimum_training_size - padding_between_training_and_validation) % \
        validation_window_size_temp
    if remainder_size == 0:
        # Remainder of 0 means the data perfectly fits in the number of fold
        # and remainder should be window_size instead.
        remainder_size = validation_window_size_temp
    # Initialize the inputs.
    data_inputs = np.random.randint(low=0, high=1, size=(batch_size, time_series_size, features_size)).astype(float)
    expected_outputs = np.random.randint(low=0, high=1, size=(batch_size, time_series_size, features_size)) \
        .astype(float)
    # Initialize the class to test.
    step = AnchoredWalkForwardTimeSeriesCrossValidationWrapper(
        validation_window_size=validation_window_size,
        minimum_training_size=minimum_training_size,
        padding_between_training_and_validation=padding_between_training_and_validation,
        drop_remainder=drop_remainder
    )
    # Act
    train_data_inputs, train_expected_outputs, validation_data_inputs, validation_expected_outputs = \
        step.split(data_inputs, expected_outputs)
    # Assert
    assert len(train_data_inputs) == number_of_fold
    assert len(train_expected_outputs) == number_of_fold
    assert len(validation_data_inputs) == number_of_fold
    assert len(validation_expected_outputs) == number_of_fold
    # Fold 0 trains on the minimum window; fold 1's training window has
    # grown by exactly one validation window (anchored/expanding scheme).
    assert train_data_inputs[0].shape == (batch_size, minimum_training_size, features_size)
    assert train_expected_outputs[0].shape == (batch_size, minimum_training_size, features_size)
    assert validation_data_inputs[0].shape == (batch_size, validation_window_size_temp, features_size)
    assert validation_expected_outputs[0].shape == (batch_size, validation_window_size_temp, features_size)
    assert train_data_inputs[1].shape == (batch_size, minimum_training_size + validation_window_size_temp,
                                          features_size)
    assert train_expected_outputs[1].shape == (batch_size, minimum_training_size + validation_window_size_temp,
                                               features_size)
    assert validation_data_inputs[1].shape == (batch_size, validation_window_size_temp, features_size)
    assert validation_expected_outputs[1].shape == (batch_size, validation_window_size_temp, features_size)
    if drop_remainder:
        # Last kept fold trained on min size plus (folds - 1) full windows.
        assert train_data_inputs[-1].shape == (
            batch_size, minimum_training_size + (number_of_fold - 1) * validation_window_size_temp, features_size)
        assert train_expected_outputs[-1].shape == (
            batch_size, minimum_training_size + (number_of_fold - 1) * validation_window_size_temp, features_size)
        assert validation_data_inputs[-1].shape == (batch_size, validation_window_size_temp, features_size)
        assert validation_expected_outputs[-1].shape == (batch_size, validation_window_size_temp, features_size)
    else:
        # Last fold trains on everything before the remainder + padding.
        assert train_data_inputs[-1].shape == (
            batch_size, time_series_size - remainder_size - padding_between_training_and_validation, features_size)
        assert train_expected_outputs[-1].shape == (
            batch_size, time_series_size - remainder_size - padding_between_training_and_validation, features_size)
        assert validation_data_inputs[-1].shape == (batch_size, remainder_size, features_size)
        assert validation_expected_outputs[-1].shape == (batch_size, remainder_size, features_size)
| 45.050847
| 132
| 0.722348
| 1,318
| 10,632
| 5.427921
| 0.116844
| 0.082471
| 0.114621
| 0.093933
| 0.855465
| 0.85267
| 0.841487
| 0.841487
| 0.841487
| 0.836315
| 0
| 0.013176
| 0.200527
| 10,632
| 235
| 133
| 45.242553
| 0.828471
| 0.152276
| 0
| 0.743243
| 0
| 0
| 0.022628
| 0.018838
| 0
| 0
| 0
| 0
| 0.27027
| 1
| 0.013514
| false
| 0
| 0.027027
| 0
| 0.040541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7da0bb921d254a1f8ebe153262b102bcfc686383
| 3,816
|
py
|
Python
|
tests/test_cost_calculators/test_pooling.py
|
suga93/chainer_computational_cost
|
50869d3efebdff3e681213b98bebe097460369ab
|
[
"MIT"
] | 52
|
2018-10-17T05:54:49.000Z
|
2021-06-09T09:19:48.000Z
|
tests/test_cost_calculators/test_pooling.py
|
suga93/chainer_computational_cost
|
50869d3efebdff3e681213b98bebe097460369ab
|
[
"MIT"
] | 18
|
2018-07-31T11:08:41.000Z
|
2020-06-01T08:50:13.000Z
|
tests/test_cost_calculators/test_pooling.py
|
suga93/chainer_computational_cost
|
50869d3efebdff3e681213b98bebe097460369ab
|
[
"MIT"
] | 7
|
2018-10-28T13:17:32.000Z
|
2021-03-16T13:32:17.000Z
|
import chainer.functions.pooling as P
import numpy as np
from helpers import calculate_cost
def test_max_pooling():
    """Cost of 2x2/stride-2 max pooling over a 1x3x100x100 input."""
    inputs = np.random.randn(1, 3, 100, 100).astype(np.float32)
    layer = P.max_pooling_2d.MaxPooling2D(np.int64(2), np.int64(2),
                                          np.int64(0), cover_all=True)
    flops, mread, mwrite, params = calculate_cost(layer, [inputs])

    out_size = 3 * 50 * 50
    # flops is (output size) * (per-window work): a 2x2 window needs
    # 2*2-1 max operations.
    assert flops == out_size * (2 * 2 - 1)
    assert mread == inputs.size
    assert mwrite == out_size
    assert params == {'k': 2, 's': 2, 'p': 0}
    # Reported hyperparameters must be plain Python ints, not numpy ints.
    for key in ('k', 's', 'p'):
        assert type(params[key]) is int
def test_average_pooling():
    """Cost of 2x2/stride-2 average pooling over a 1x3x100x100 input."""
    inputs = np.random.randn(1, 3, 100, 100).astype(np.float32)
    layer = P.average_pooling_2d.AveragePooling2D(np.int64(2), np.int64(2),
                                                  np.int64(0), cover_all=True)
    flops, mread, mwrite, params = calculate_cost(layer, [inputs])

    out_size = 3 * 50 * 50
    # flops is (output size) * (per-window work): a 2x2 window needs
    # 2*2-1 additions plus 1 division.
    assert flops == out_size * ((2 * 2 - 1) + 1)
    assert mread == inputs.size
    assert mwrite == out_size
    assert params == {'k': 2, 's': 2, 'p': 0}
    # Reported hyperparameters must be plain Python ints, not numpy ints.
    for key in ('k', 's', 'p'):
        assert type(params[key]) is int
def test_unpooling_2d():
    """Cost of 3x/stride-3 unpooling with an explicit 30x30 output size."""
    inputs = np.random.randn(1, 3, 10, 10).astype(np.float32)
    layer = P.unpooling_2d.Unpooling2D(
        ksize=np.int64(3), stride=np.int64(3), outsize=(30, 30))
    flops, mread, mwrite, params = calculate_cost(layer, [inputs])

    # Unpooling only copies values, so no arithmetic is counted.
    assert flops == 0
    assert mread == 1 * 3 * 10 * 10
    assert mwrite == 3 * 30 * 30
    assert params == {
        'k': 3, 's': 3, 'p': 0, 'outsize': (30, 30), 'cover_all': True
    }
    # Reported hyperparameters must be plain Python ints, not numpy ints.
    for key in ('k', 's', 'p'):
        assert type(params[key]) is int
def test_unpooling_2d_no_outsize():
    """Cost of 3x/stride-3 unpooling when the output size is inferred."""
    inputs = np.random.randn(1, 3, 10, 10).astype(np.float32)
    layer = P.unpooling_2d.Unpooling2D(ksize=np.int64(3), stride=np.int64(3))
    flops, mread, mwrite, params = calculate_cost(layer, [inputs])

    # Unpooling only copies values, so no arithmetic is counted.
    assert flops == 0
    assert mread == 1 * 3 * 10 * 10
    # Without an explicit outsize, a 10x10 input yields 28x28 output.
    assert mwrite == 3 * 28 * 28
    assert params == {
        'k': 3, 's': 3, 'p': 0, 'outsize': (28, 28), 'cover_all': True
    }
    # Reported hyperparameters must be plain Python ints, not numpy ints.
    for key in ('k', 's', 'p'):
        assert type(params[key]) is int
def test_upsampling_2d():
    """Cost of 3x/stride-3 index-based upsampling with explicit outsize."""
    inputs = np.random.randn(1, 3, 10, 10).astype(np.float32)
    pool_indices = np.random.randint(0, 9, (1, 3, 10, 10)).astype(np.int32)
    layer = P.upsampling_2d.Upsampling2D(pool_indices, ksize=np.int64(3),
                                         stride=np.int64(3), outsize=(30, 30))
    flops, mread, mwrite, params = calculate_cost(layer, [inputs])

    # Upsampling only scatters values, so no arithmetic is counted.
    assert flops == 0
    # Both the data and the index array are read.
    assert mread == 2 * 3 * 10 * 10
    assert mwrite == 3 * 30 * 30
    assert params == {
        'k': 3, 's': 3, 'p': 0, 'outsize': (30, 30), 'cover_all': True
    }
    # Reported hyperparameters must be plain Python ints, not numpy ints.
    for key in ('k', 's', 'p'):
        assert type(params[key]) is int
def test_upsampling_2d_no_outsize():
    """Cost of 3x/stride-3 index-based upsampling with inferred outsize."""
    inputs = np.random.randn(1, 3, 10, 10).astype(np.float32)
    pool_indices = np.random.randint(0, 9, (1, 3, 10, 10)).astype(np.int32)
    layer = P.upsampling_2d.Upsampling2D(pool_indices, ksize=np.int64(3),
                                         stride=np.int64(3))
    flops, mread, mwrite, params = calculate_cost(layer, [inputs])

    # Upsampling only scatters values, so no arithmetic is counted.
    assert flops == 0
    # Both the data and the index array are read.
    assert mread == 2 * 3 * 10 * 10
    # Without an explicit outsize, a 10x10 input yields 28x28 output.
    assert mwrite == 3 * 28 * 28
    assert params == {
        'k': 3, 's': 3, 'p': 0, 'outsize': (28, 28), 'cover_all': True
    }
    # Reported hyperparameters must be plain Python ints, not numpy ints.
    for key in ('k', 's', 'p'):
        assert type(params[key]) is int
| 36.692308
| 74
| 0.576782
| 588
| 3,816
| 3.680272
| 0.117347
| 0.083179
| 0.133087
| 0.083179
| 0.930684
| 0.930684
| 0.930684
| 0.930684
| 0.930684
| 0.930684
| 0
| 0.090749
| 0.257862
| 3,816
| 103
| 75
| 37.048544
| 0.673376
| 0.059486
| 0
| 0.738095
| 0
| 0
| 0.027902
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.071429
| false
| 0
| 0.035714
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7dd7ceba9f158c92cc078578a58985ce19ed9ff1
| 207
|
py
|
Python
|
Intermediate/9_2_import/main.py
|
Pure-L0G1C/LearnPython
|
ac0381ff5171ccbd34e92262cc19bf77bbf613f8
|
[
"MIT"
] | 22
|
2018-06-05T09:14:23.000Z
|
2020-11-02T22:52:32.000Z
|
Intermediate/9_2_import/main.py
|
s1ko/LearnPython
|
ea3b01fc93f541c8d136f866a8fd448dbed72ef2
|
[
"MIT"
] | 1
|
2020-02-08T19:12:43.000Z
|
2020-02-08T19:12:43.000Z
|
Intermediate/9_2_import/main.py
|
s1ko/LearnPython
|
ea3b01fc93f541c8d136f866a8fd448dbed72ef2
|
[
"MIT"
] | 13
|
2018-06-05T15:59:04.000Z
|
2020-11-02T22:52:33.000Z
|
from lib.src import b # lib/src/b.py
from lib.src import c # lib/src/c.py
# from lib.src.vendor.new.src.lib import fake # lib/src/vendor/new/src/lib/fake.py
from lib.src.c import letter as letter_c
| 29.571429
| 83
| 0.710145
| 44
| 207
| 3.318182
| 0.272727
| 0.287671
| 0.273973
| 0.246575
| 0.287671
| 0.287671
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164251
| 207
| 6
| 84
| 34.5
| 0.843931
| 0.507246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7dea2a3d6a2183649890e0c34612e737155f1b13
| 6,651
|
py
|
Python
|
MatchApp/migrations/0024_auto_20210319_0108.py
|
elizza19/django_local_library
|
f2dc053e44684b7a966d8bc0ff364f5251449f5b
|
[
"Apache-2.0"
] | null | null | null |
MatchApp/migrations/0024_auto_20210319_0108.py
|
elizza19/django_local_library
|
f2dc053e44684b7a966d8bc0ff364f5251449f5b
|
[
"Apache-2.0"
] | null | null | null |
MatchApp/migrations/0024_auto_20210319_0108.py
|
elizza19/django_local_library
|
f2dc053e44684b7a966d8bc0ff364f5251449f5b
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-19 00:08
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add personality-trait fields to ``mascotas`` and drop the old
    per-species personality models (``personalidad_perro`` /
    ``personalidad_gato``).

    The generator duplicated the 0-10 rating choice list twelve times and
    the Si/No pair nine times; they are hoisted into class-level constants
    (identical values) so each list is defined exactly once.
    """

    # 0..10 rating scale stored as strings — identical to the generated
    # literal [('0', '0'), ('1', '1'), ..., ('10', '10')].
    _SCALE_CHOICES = [(str(i), str(i)) for i in range(11)]
    # Yes/No pair used by the boolean-like CharFields.
    _YES_NO_CHOICES = [('S', 'Si'), ('N', 'No')]

    dependencies = [
        ('MatchApp', '0023_auto_20210318_2056'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='personalidad_perro',
            name='id_mascota',
        ),
        migrations.AddField(
            model_name='mascotas',
            name='amigable',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='carinoso',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='curioso',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='deportista',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='dominante',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='educado',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='extrovertido',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='impulsivo',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='independiente',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='jugueton',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='relacion_p_hembras',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='relacion_p_humanos',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='relacion_p_machos',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='sociable_humanos',
            # NOTE(review): max_length=2 here vs 20 on the sibling Si/No
            # fields — preserved as generated; confirm it is intentional.
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=2, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='temeroso',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AddField(
            model_name='mascotas',
            name='tranquilo',
            field=models.CharField(blank=True, choices=_SCALE_CHOICES, max_length=3, null=True),
        ),
        migrations.AlterField(
            model_name='mascotas',
            name='activo_web',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True, verbose_name='Activo en Web'),
        ),
        migrations.AlterField(
            model_name='mascotas',
            name='alergias',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='mascotas',
            name='enfermedad_cronica',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True, verbose_name='enfermedad cronica'),
        ),
        migrations.AlterField(
            model_name='mascotas',
            name='esterilizado',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='mascotas',
            name='numero_carnet',
            field=models.CharField(blank=True, max_length=10, null=True, verbose_name='Numero Carnet'),
        ),
        migrations.AlterField(
            model_name='mascotas',
            name='tratamiento',
            field=models.CharField(blank=True, choices=_YES_NO_CHOICES, max_length=20, null=True),
        ),
        migrations.DeleteModel(
            name='personalidad_gato',
        ),
        migrations.DeleteModel(
            name='personalidad_perro',
        ),
    ]
| 49.634328
| 216
| 0.462036
| 766
| 6,651
| 3.926893
| 0.114883
| 0.068816
| 0.124335
| 0.15359
| 0.816157
| 0.806516
| 0.75266
| 0.75266
| 0.726729
| 0.726729
| 0
| 0.071633
| 0.265374
| 6,651
| 133
| 217
| 50.007519
| 0.544003
| 0.006766
| 0
| 0.700787
| 1
| 0
| 0.137038
| 0.003483
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007874
| 0
| 0.031496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c4b419f447cfa12d45dbce12de5f2c22b4b4f500
| 51
|
py
|
Python
|
test_ramBlox.py
|
ponyatov/ramBlox
|
f6b5a79561c8139117d4ae46e30eaedb727c1aa3
|
[
"MIT"
] | null | null | null |
test_ramBlox.py
|
ponyatov/ramBlox
|
f6b5a79561c8139117d4ae46e30eaedb727c1aa3
|
[
"MIT"
] | null | null | null |
test_ramBlox.py
|
ponyatov/ramBlox
|
f6b5a79561c8139117d4ae46e30eaedb727c1aa3
|
[
"MIT"
] | null | null | null |
from ramBlox import *
def test_any():
    """Smoke test: always passes, confirming the module imports cleanly."""
    assert True
| 12.75
| 27
| 0.745098
| 8
| 51
| 4.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 51
| 3
| 28
| 17
| 0.880952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c4d23497100d36f5479118a2bee71fc1f0a8253f
| 18
|
py
|
Python
|
testdata/html_in_string.py
|
mcmx73/syntaxhighlight
|
6fb591527d768f6e7a1bb403cf4ce572262de864
|
[
"BSD-3-Clause"
] | 223
|
2015-01-18T13:03:42.000Z
|
2022-02-06T05:17:54.000Z
|
testdata/html_in_string.py
|
mcmx73/syntaxhighlight
|
6fb591527d768f6e7a1bb403cf4ce572262de864
|
[
"BSD-3-Clause"
] | 15
|
2015-01-15T21:21:41.000Z
|
2020-03-10T13:54:14.000Z
|
testdata/html_in_string.py
|
mcmx73/syntaxhighlight
|
6fb591527d768f6e7a1bb403cf4ce572262de864
|
[
"BSD-3-Clause"
] | 22
|
2015-01-17T05:42:43.000Z
|
2020-12-02T11:08:10.000Z
|
"<h1>hello!</h1>"
| 9
| 17
| 0.5
| 3
| 18
| 3
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0.055556
| 18
| 1
| 18
| 18
| 0.411765
| 0.833333
| 0
| 0
| 0
| 0
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f2284aed69c5ecb7f79e06a130d75c826d83c6d7
| 219
|
py
|
Python
|
Core/GlobalExceptions.py
|
jbzdarkid/TwitchLink
|
c7bae13b46c7e6af7dc74539fdbca9cbb01f4778
|
[
"MIT"
] | 26
|
2021-02-04T00:29:21.000Z
|
2022-03-25T17:14:43.000Z
|
Core/GlobalExceptions.py
|
jbzdarkid/TwitchLink
|
c7bae13b46c7e6af7dc74539fdbca9cbb01f4778
|
[
"MIT"
] | 19
|
2021-02-04T01:27:07.000Z
|
2022-03-19T16:22:46.000Z
|
Core/GlobalExceptions.py
|
jbzdarkid/TwitchLink
|
c7bae13b46c7e6af7dc74539fdbca9cbb01f4778
|
[
"MIT"
] | 10
|
2021-06-08T17:41:40.000Z
|
2022-03-28T22:38:40.000Z
|
class Exceptions:
    """Namespace grouping the application's custom exception types."""

    class NetworkError(Exception):
        """Signals a failure in a network operation."""

        def __str__(self):
            return "Network Error"

    class FileSystemError(Exception):
        """Signals a failure in a filesystem operation."""

        def __str__(self):
            return "File System Error"
| 27.375
| 38
| 0.625571
| 21
| 219
| 6.142857
| 0.619048
| 0.186047
| 0.232558
| 0.294574
| 0.387597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296804
| 219
| 8
| 38
| 27.375
| 0.837662
| 0
| 0
| 0.285714
| 0
| 0
| 0.136364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.285714
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
48409726abee432d1f19b7fb081eb040798714f1
| 60,466
|
py
|
Python
|
tests/evidence_string_generation/test_evidence_strings.py
|
sundarvenkata-EBI/eva-cttv-pipeline
|
0bcb416919ee29fa40a65426545073900811d4ee
|
[
"Apache-2.0"
] | null | null | null |
tests/evidence_string_generation/test_evidence_strings.py
|
sundarvenkata-EBI/eva-cttv-pipeline
|
0bcb416919ee29fa40a65426545073900811d4ee
|
[
"Apache-2.0"
] | null | null | null |
tests/evidence_string_generation/test_evidence_strings.py
|
sundarvenkata-EBI/eva-cttv-pipeline
|
0bcb416919ee29fa40a65426545073900811d4ee
|
[
"Apache-2.0"
] | null | null | null |
import os.path
import gzip
import json
import unittest
from datetime import datetime
from types import SimpleNamespace
from eva_cttv_pipeline.evidence_string_generation import clinvar
from eva_cttv_pipeline.evidence_string_generation import clinvar_to_evidence_strings
from eva_cttv_pipeline.evidence_string_generation import consequence_type as CT
from eva_cttv_pipeline.evidence_string_generation import evidence_strings
from tests.evidence_string_generation import test_clinvar_to_evidence_strings
from tests.evidence_string_generation import config
DATE_LAST_UPDATED = 1412982000000
DATE_ASSERTED = datetime.fromtimestamp((DATE_LAST_UPDATED / 1000)).isoformat()
MAPPINGS = SimpleNamespace()
def _get_test_cellbase_record_gene():
return {"chromosome":"3","start":150645894,"end":150645894,"reference":"A","alternate":"C","source":"clinvar","clinvarSet":{"recordStatus":"current","title":"NM_001195794.1(CLRN1):c.567T>G (p.Tyr189Ter) AND Usher syndrome, type 3","referenceClinVarAssertion":{"clinVarAccession":{"acc":"RCV000004642","version":3,"type":"RCV","dateUpdated":1435446000000},"recordStatus":"current","clinicalSignificance":{"reviewStatus":"CRITERIA_PROVIDED_SINGLE_SUBMITTER","description":"Pathogenic","dateLastEvaluated":1435100400000},"assertion":{"type":"VARIATION_TO_DISEASE"},"attributeSet":[{"attribute":{"value":"Autosomal recessive inheritance","integerValue":263,"type":"ModeOfInheritance"},"xref":[{"db":"Laboratory for Molecular Medicine, Partners HealthCare Personalized Medicine","id":"11483565","status":"CURRENT"}]}],"observedIn":[{"sample":{"origin":"germline","species":{"value":"human","taxonomyId":9606},"affectedStatus":"not provided"},"method":[{"methodType":"CLINICAL_TESTING"},{"methodType":"LITERATURE_ONLY"}],"observedData":[{"attribute":{"integerValue":2,"type":"NumFamiliesWithVariant"},"id":6557764},{"attribute":{"value":"not provided","type":"Description"},"id":6557764},{"attribute":{"value":"Fields et al. (2002) demonstrated that the Fin(major) USH3A mutation in exon 3 of the USH3A gene, which had been identified by Joensuu et al. (2001) as 300C-T (TYR100TER), should be referred to as 528T-G, resulting in a tyr176-to-ter substitution. Joensuu et al. (2001) had identified homozygosity for this mutation in a Finnish family segregating Usher syndrome type IIIA (USH3A; 276902) and found it in a further 52 Finnish patients. Fields et al. 
(2002) found this mutation in 11 of 28 mutated alleles from affected individuals of Finnish and other northern European ancestry.","type":"Description"},"citation":[{"id":[{"value":"11524702","source":"PubMed"}],"type":"general"},{"id":[{"value":"12145752","source":"PubMed"}],"type":"general"}],"id":6557764},{"attribute":{"integerValue":3,"type":"VariantAlleles"},"id":6557764}]}],"measureSet":{"measure":[{"name":[{"elementValue":{"value":"NM_001195794.1(CLRN1):c.567T>G (p.Tyr189Ter)","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"0.000076887590","type":"AlleleFrequency"},"xref":[{"db":"dbSNP","id":"121908140","status":"CURRENT"},{"db":"NHLBI GO Exome Sequencing Project (ESP)","id":"ESP6500SI-V2","url":"http://evs.gs.washington.edu/EVS/","status":"CURRENT"}]},{"attribute":{"value":"NM_174878.2:c.528T>G","type":"HGVS, coding","change":"c.528T>G"}},{"attribute":{"value":"NM_001256819.1:c.*142T>G","type":"HGVS, coding, RefSeq","change":"c.*142T>G"}},{"attribute":{"value":"NM_052995.2:c.300T>G","type":"HGVS, coding, RefSeq","change":"c.300T>G"}},{"attribute":{"value":"NM_001195794.1:c.567T>G","type":"HGVS, coding, RefSeq","change":"c.567T>G"}},{"attribute":{"value":"NG_009168.1:g.49893T>G","type":"HGVS, genomic, RefSeqGene","change":"g.49893T>G"}},{"attribute":{"value":"NC_000003.12:g.150928107A>C","integerValue":38,"type":"HGVS, genomic, top level","change":"g.150928107A>C"}},{"attribute":{"value":"NC_000003.11:g.150645894A>C","integerValue":37,"type":"HGVS, genomic, top level, previous","change":"g.150645894A>C"}},{"attribute":{"value":"NR_046380.2:n.1009T>G","type":"HGVS, non-coding","change":"n.1009T>G"}},{"attribute":{"value":"NR_046380.1:n.1010T>G","type":"HGVS, previous","change":"n.1010T>G"}},{"attribute":{"value":"p.Tyr176X","type":"HGVS, protein"}},{"attribute":{"value":"NP_443721.1:p.Tyr100Ter","type":"HGVS, protein, 
RefSeq","change":"p.Tyr100Ter"},"xref":[{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]},{"attribute":{"value":"NP_777367.1:p.Tyr176Ter","type":"HGVS, protein, RefSeq","change":"p.Tyr176Ter"},"xref":[{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]},{"attribute":{"value":"NP_001182723.1:p.Tyr189Ter","type":"HGVS, protein, RefSeq","change":"p.Tyr189Ter"},"xref":[{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]},{"attribute":{"value":"NM_174878.2:EXON 3","type":"Location"}},{"attribute":{"value":"3 prime UTR variant","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001624","status":"CURRENT"},{"db":"RefSeq","id":"NM_001256819.1:c.*142T>G","status":"CURRENT"}]},{"attribute":{"value":"nonsense","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001587","status":"CURRENT"},{"db":"RefSeq","id":"NM_001195794.1:c.567T>G","status":"CURRENT"}]},{"attribute":{"value":"non-coding transcript variant","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001619","status":"CURRENT"},{"db":"RefSeq","id":"NR_046380.2:n.1009T>G","status":"CURRENT"}]},{"attribute":{"value":"Y176*","type":"ProteinChange1LetterCode"},"xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"}]},{"attribute":{"value":"Y100*","type":"ProteinChange1LetterCode"}},{"attribute":{"value":"Y189*","type":"ProteinChange1LetterCode"}},{"attribute":{"value":"TYR176TER","type":"ProteinChange3LetterCode"},"xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic 
variant","status":"CURRENT"}]}],"cytogeneticLocation":["3q25.1"],"sequenceLocation":[{"assembly":"GRCh38","chr":"3","accession":"NC_000003.12","start":150928107,"stop":150928107,"displayStart":150928107,"displayStop":150928107,"variantLength":1,"referenceAllele":"A","alternateAllele":"C","assemblyAccessionVersion":"GCF_000001405.26","assemblyStatus":"current"},{"assembly":"GRCh37","chr":"3","accession":"NC_000003.11","start":150645894,"stop":150645894,"displayStart":150645894,"displayStop":150645894,"variantLength":1,"referenceAllele":"A","alternateAllele":"C","assemblyAccessionVersion":"GCF_000001405.25","assemblyStatus":"previous"}],"measureRelationship":[{"name":[{"elementValue":{"value":"clarin 1","type":"Preferred"}}],"symbol":[{"elementValue":{"value":"CLRN1","type":"Preferred"}}],"sequenceLocation":[{"assembly":"GRCh38","chr":"3","accession":"NC_000003.12","start":150918910,"stop":150973019,"displayStart":150918910,"displayStop":150973019,"strand":"-","variantLength":46837,"assemblyAccessionVersion":"GCF_000001405.26","assemblyStatus":"current"},{"assembly":"GRCh37","chr":"3","accession":"NC_000003.11","start":150643949,"stop":150690785,"displayStart":150643949,"displayStop":150690785,"strand":"-","variantLength":46837,"assemblyAccessionVersion":"GCF_000001405.25","assemblyStatus":"previous"}],"type":"variant in gene","xref":[{"db":"Gene","id":"7401","status":"CURRENT"},{"db":"OMIM","id":"606397","type":"MIM","status":"CURRENT"}]}],"type":"single nucleotide variant","id":19431,"xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"},{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]}],"name":[{"elementValue":{"value":"NM_001195794.1(CLRN1):c.567T>G (p.Tyr189Ter)","type":"Preferred"}}],"type":"Variant","id":4392},"traitSet":{"trait":[{"name":[{"elementValue":{"value":"Usher syndrome, type 3","type":"Preferred"},"xref":[{"db":"Genetic Alliance","id":"Usher+syndrome%2C+type+3/7326","status":"CURRENT"},{"db":"Office of 
Rare Diseases","id":"5442","status":"CURRENT"}]},{"elementValue":{"value":"Usher Syndrome, Type III","type":"Alternate"}},{"elementValue":{"value":"USHER SYNDROME, TYPE IIIA","type":"Alternate"},"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"},{"db":"OMIM","id":"606397.0002","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0007","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0004","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0005","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0003","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0008","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0006","type":"Allelic variant","status":"CURRENT"}]},{"elementValue":{"value":"Usher syndrome, type 3A","type":"Alternate"}},{"elementValue":{"value":"Orphanet:886","type":"EFO id"}},{"elementValue":{"value":"Usher syndrome","type":"EFO name"}},{"elementValue":{"value":"http://www.orpha.net/ORDO/Orphanet_886","type":"EFO URL"}}],"symbol":[{"elementValue":{"value":"USH3","type":"Preferred"},"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"},{"db":"Office of Rare Diseases","id":"5442","status":"CURRENT"}]},{"elementValue":{"value":"USH3A","type":"Alternate"},"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"},{"db":"Office of Rare Diseases","id":"5442","status":"CURRENT"}]}],"attributeSet":[{"attribute":{"value":"Neonatal/infancy","type":"age of onset"},"xref":[{"db":"Orphanet","id":"886","status":"CURRENT"},{"db":"Orphanet","id":"231183","status":"CURRENT"}]}],"citation":[{"id":[{"value":"21697857","source":"PubMed"}],"type":"Translational/Evidence-based","abbrev":"EuroGenetest, 
2011"}],"type":"Disease","id":5092,"xref":[{"db":"MedGen","id":"C1568248","status":"CURRENT"},{"db":"Orphanet","id":"231183","status":"CURRENT"},{"db":"Orphanet","id":"886","status":"CURRENT"},{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"}]}],"type":"Disease","id":1209},"dateCreated":1344812400000,"dateLastUpdated":1435359600000,"id":62145},"clinVarAssertion":[{"clinVarSubmissionID":{"submitter":"OMIM","title":"CLRN1, TYR176TER_USHER SYNDROME, TYPE IIIA","localKey":"606397.0001_USHER SYNDROME, TYPE IIIA","submitterDate":1435100400000},"clinVarAccession":{"acc":"SCV000024816","version":2,"type":"SCV","orgID":3,"dateUpdated":1435359600000},"recordStatus":"current","clinicalSignificance":{"reviewStatus":"NO_ASSERTION_CRITERIA_PROVIDED","description":["Pathogenic"],"dateLastEvaluated":1435100400000},"assertion":{"type":"variation to disease"},"externalID":{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"},"observedIn":[{"sample":{"origin":"germline","species":{"value":"human"},"affectedStatus":"not provided"},"method":[{"methodType":"LITERATURE_ONLY"}],"observedData":[{"attribute":{"value":"Fields et al. (2002) demonstrated that the Fin(major) USH3A mutation in exon 3 of the USH3A gene, which had been identified by Joensuu et al. (2001) as 300C-T (TYR100TER), should be referred to as 528T-G, resulting in a tyr176-to-ter substitution. Joensuu et al. (2001) had identified homozygosity for this mutation in a Finnish family segregating Usher syndrome type IIIA (USH3A; 276902) and found it in a further 52 Finnish patients. Fields et al. 
(2002) found this mutation in 11 of 28 mutated alleles from affected individuals of Finnish and other northern European ancestry.","type":"Description"},"citation":[{"id":[{"value":"12145752","source":"PubMed"}]},{"id":[{"value":"11524702","source":"PubMed"}]}],"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"}]}]}],"measureSet":{"measure":[{"name":[{"elementValue":{"value":"CLRN1, TYR176TER","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"TYR176TER","type":"NonHGVS"}}],"measureRelationship":[{"symbol":[{"elementValue":{"value":"CLRN1","type":"Preferred"}}],"type":"variant in gene"}],"type":"Variation","xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"}]}],"type":"Variant"},"traitSet":{"trait":[{"name":[{"elementValue":{"value":"USHER SYNDROME, TYPE IIIA","type":"Preferred"}}],"type":"Disease"}],"type":"Disease"},"id":24816},{"clinVarSubmissionID":{"submitter":"Laboratory for Molecular Medicine,Partners HealthCare Personalized Medicine","localKey":"11483565|OMIM:276902","submitterDate":1422489600000},"clinVarAccession":{"acc":"SCV000203992","version":1,"type":"SCV","orgID":21766,"dateUpdated":1422576000000},"recordStatus":"current","clinicalSignificance":{"reviewStatus":"CLASSIFIED_BY_SINGLE_SUBMITTER","description":["Pathogenic"],"citation":[{"id":[{"value":"11524702","source":"PubMed"}]}],"comment":[{"value":"The Tyr176X variant in CLRN1 has been previously identified in 52 homozygous and 2 compound heterozygous individuals with Usher syndrome type III (Joensuu 2001). This variant has been identified in 1/8,600 European American chromosomes by the NHLBI Exome Sequencing Project (http://evs.gs.washington.edu/EVS/; dbSNP rs121908140). Although this variant has been seen in the general population, its frequency is low enough to be consistent with a recessive carrier frequency. 
This nonsense variant leads to a premature termination codon at position 176, which is predicted to lead to a truncated or absent protein. In summary, this variant meets our criteria to be classified as pathogenic in a recessive manner for Usher syndrome (http://pcpgm.partners.org/LMM)."}],"dateLastEvaluated":1388620800000},"assertion":{"type":"variation to disease"},"externalID":{"db":"Laboratory for Molecular Medicine (Partners HealthCare Personalized Medicine)","id":"11483565","status":"CURRENT"},"attributeSet":[{"attribute":{"value":"Autosomal recessive inheritance","type":"ModeOfInheritance"}}],"observedIn":[{"sample":{"origin":"germline","species":{"value":"human","taxonomyId":9606},"affectedStatus":"not provided","familyData":{"numFamiliesWithVariant":2}},"method":[{"methodType":"CLINICAL_TESTING"}],"observedData":[{"attribute":{"integerValue":3,"type":"VariantAlleles"}}]}],"measureSet":{"measure":[{"name":[{"elementValue":{"value":"NM_174878.2:c.528T>G","type":"Alternate"}},{"elementValue":{"value":"p.Tyr176X","type":"Alternate"}}],"attributeSet":[{"attribute":{"value":"NM_174878.2:EXON 3","type":"Location"}},{"attribute":{"value":"NC_000003.11:g.150645894A>C","type":"HGVS"}}],"sequenceLocation":[{"assembly":"GRCh37","chr":"3","start":150645894,"stop":150645894,"variantLength":1,"referenceAllele":"A","alternateAllele":"C"}],"measureRelationship":[{"symbol":[{"elementValue":{"value":"CLRN1","type":"Preferred"}}],"type":"variant in gene"}],"type":"Variation","xref":[{"db":"dbSNP","id":"121908140","type":"rsNumber","status":"CURRENT"}]}],"type":"Variant"},"traitSet":{"trait":[{"name":[{"elementValue":{"value":"Usher syndrome, type 3A","type":"Preferred"}}],"type":"Disease","xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"}]}],"type":"Disease"},"submissionName":"LMM_all.variants_NCBI_3.16.2013","id":366075}],"id":6973966}}
def get_args_CTTVGeneticsEvidenceString_init():
clinvarRecord = \
clinvar.ClinvarRecord({"recordStatus": "current", "title": "NM_001195794.1(CLRN1):c.567T>G (p.Tyr189Ter) AND Usher syndrome, type 3", "referenceClinVarAssertion":{"clinVarAccession":{"acc": "RCV000004642", "version":3, "type": "RCV", "dateUpdated":1435446000000}, "recordStatus": "current", "clinicalSignificance":{"reviewStatus": "CRITERIA_PROVIDED_SINGLE_SUBMITTER", "description": "Pathogenic", "dateLastEvaluated":1435100400000}, "assertion":{"type": "VARIATION_TO_DISEASE"}, "attributeSet":[{"attribute":{"value": "Autosomal recessive inheritance", "integerValue":263, "type":"ModeOfInheritance"}, "xref":[{"db":"Laboratory for Molecular Medicine, Partners HealthCare Personalized Medicine","id":"11483565","status":"CURRENT"}]}], "observedIn":[{"sample":{"origin":"germline","species":{"value":"human","taxonomyId":9606},"affectedStatus":"not provided"},"method":[{"methodType":"CLINICAL_TESTING"},{"methodType":"LITERATURE_ONLY"}],"observedData":[{"attribute":{"integerValue":2,"type":"NumFamiliesWithVariant"},"id":6557764},{"attribute":{"value":"not provided","type":"Description"},"id":6557764},{"attribute":{"value":"Fields et al. (2002) demonstrated that the Fin(major) USH3A mutation in exon 3 of the USH3A gene, which had been identified by Joensuu et al. (2001) as 300C-T (TYR100TER), should be referred to as 528T-G, resulting in a tyr176-to-ter substitution. Joensuu et al. (2001) had identified homozygosity for this mutation in a Finnish family segregating Usher syndrome type IIIA (USH3A; 276902) and found it in a further 52 Finnish patients. Fields et al. 
(2002) found this mutation in 11 of 28 mutated alleles from affected individuals of Finnish and other northern European ancestry.","type":"Description"},"citation":[{"id":[{"value":"11524702","source":"PubMed"}],"type":"general"},{"id":[{"value":"12145752","source":"PubMed"}],"type":"general"}],"id":6557764},{"attribute":{"integerValue":3,"type":"VariantAlleles"},"id":6557764}]}], "measureSet":{"measure":[{"name":[{"elementValue":{"value":"NM_001195794.1(CLRN1):c.567T>G (p.Tyr189Ter)","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"0.000076887590","type":"AlleleFrequency"},"xref":[{"db":"dbSNP","id":"121908140","status":"CURRENT"},{"db":"NHLBI GO Exome Sequencing Project (ESP)","id":"ESP6500SI-V2","url":"http://evs.gs.washington.edu/EVS/","status":"CURRENT"}]},{"attribute":{"value":"NM_174878.2:c.528T>G","type":"HGVS, coding","change":"c.528T>G"}},{"attribute":{"value":"NM_001256819.1:c.*142T>G","type":"HGVS, coding, RefSeq","change":"c.*142T>G"}},{"attribute":{"value":"NM_052995.2:c.300T>G","type":"HGVS, coding, RefSeq","change":"c.300T>G"}},{"attribute":{"value":"NM_001195794.1:c.567T>G","type":"HGVS, coding, RefSeq","change":"c.567T>G"}},{"attribute":{"value":"NG_009168.1:g.49893T>G","type":"HGVS, genomic, RefSeqGene","change":"g.49893T>G"}},{"attribute":{"value":"NC_000003.12:g.150928107A>C","integerValue":38,"type":"HGVS, genomic, top level","change":"g.150928107A>C"}},{"attribute":{"value":"NC_000003.11:g.150645894A>C","integerValue":37,"type":"HGVS, genomic, top level, previous","change":"g.150645894A>C"}},{"attribute":{"value":"NR_046380.2:n.1009T>G","type":"HGVS, non-coding","change":"n.1009T>G"}},{"attribute":{"value":"NR_046380.1:n.1010T>G","type":"HGVS, previous","change":"n.1010T>G"}},{"attribute":{"value":"p.Tyr176X","type":"HGVS, protein"}},{"attribute":{"value":"NP_443721.1:p.Tyr100Ter","type":"HGVS, protein, 
RefSeq","change":"p.Tyr100Ter"},"xref":[{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]},{"attribute":{"value":"NP_777367.1:p.Tyr176Ter","type":"HGVS, protein, RefSeq","change":"p.Tyr176Ter"},"xref":[{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]},{"attribute":{"value":"NP_001182723.1:p.Tyr189Ter","type":"HGVS, protein, RefSeq","change":"p.Tyr189Ter"},"xref":[{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]},{"attribute":{"value":"NM_174878.2:EXON 3","type":"Location"}},{"attribute":{"value":"3 prime UTR variant","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001624","status":"CURRENT"},{"db":"RefSeq","id":"NM_001256819.1:c.*142T>G","status":"CURRENT"}]},{"attribute":{"value":"nonsense","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001587","status":"CURRENT"},{"db":"RefSeq","id":"NM_001195794.1:c.567T>G","status":"CURRENT"}]},{"attribute":{"value":"non-coding transcript variant","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001619","status":"CURRENT"},{"db":"RefSeq","id":"NR_046380.2:n.1009T>G","status":"CURRENT"}]},{"attribute":{"value":"Y176*","type":"ProteinChange1LetterCode"},"xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"}]},{"attribute":{"value":"Y100*","type":"ProteinChange1LetterCode"}},{"attribute":{"value":"Y189*","type":"ProteinChange1LetterCode"}},{"attribute":{"value":"TYR176TER","type":"ProteinChange3LetterCode"},"xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic 
variant","status":"CURRENT"}]}],"cytogeneticLocation":["3q25.1"],"sequenceLocation":[{"assembly":"GRCh38","chr":"3","accession":"NC_000003.12","start":150928107,"stop":150928107,"displayStart":150928107,"displayStop":150928107,"variantLength":1,"referenceAllele":"A","alternateAllele":"C","assemblyAccessionVersion":"GCF_000001405.26","assemblyStatus":"current"},{"assembly":"GRCh37","chr":"3","accession":"NC_000003.11","start":150645894,"stop":150645894,"displayStart":150645894,"displayStop":150645894,"variantLength":1,"referenceAllele":"A","alternateAllele":"C","assemblyAccessionVersion":"GCF_000001405.25","assemblyStatus":"previous"}],"measureRelationship":[{"name":[{"elementValue":{"value":"clarin 1","type":"Preferred"}}],"symbol":[{"elementValue":{"value":"CLRN1","type":"Preferred"}}],"sequenceLocation":[{"assembly":"GRCh38","chr":"3","accession":"NC_000003.12","start":150918910,"stop":150973019,"displayStart":150918910,"displayStop":150973019,"strand":"-","variantLength":46837,"assemblyAccessionVersion":"GCF_000001405.26","assemblyStatus":"current"},{"assembly":"GRCh37","chr":"3","accession":"NC_000003.11","start":150643949,"stop":150690785,"displayStart":150643949,"displayStop":150690785,"strand":"-","variantLength":46837,"assemblyAccessionVersion":"GCF_000001405.25","assemblyStatus":"previous"}],"type":"variant in gene","xref":[{"db":"Gene","id":"7401","status":"CURRENT"},{"db":"OMIM","id":"606397","type":"MIM","status":"CURRENT"}]}],"type":"single nucleotide variant","id":19431,"xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"},{"db":"dbSNP","id":"121908140","type":"rs","status":"CURRENT"}]}],"name":[{"elementValue":{"value":"NM_001195794.1(CLRN1):c.567T>G (p.Tyr189Ter)","type":"Preferred"}}],"type":"Variant","id":4392}, "traitSet":{"trait":[{"name":[{"elementValue":{"value":"Usher syndrome, type 3","type":"Preferred"},"xref":[{"db":"Genetic Alliance","id":"Usher+syndrome%2C+type+3/7326","status":"CURRENT"},{"db":"Office of 
Rare Diseases","id":"5442","status":"CURRENT"}]},{"elementValue":{"value":"Usher Syndrome, Type III","type":"Alternate"}},{"elementValue":{"value":"USHER SYNDROME, TYPE IIIA","type":"Alternate"},"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"},{"db":"OMIM","id":"606397.0002","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0007","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0004","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0005","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0003","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0008","type":"Allelic variant","status":"CURRENT"},{"db":"OMIM","id":"606397.0006","type":"Allelic variant","status":"CURRENT"}]},{"elementValue":{"value":"Usher syndrome, type 3A","type":"Alternate"}},{"elementValue":{"value":"Orphanet:886","type":"EFO id"}},{"elementValue":{"value":"Usher syndrome","type":"EFO name"}},{"elementValue":{"value":"http://www.orpha.net/ORDO/Orphanet_886","type":"EFO URL"}}],"symbol":[{"elementValue":{"value":"USH3","type":"Preferred"},"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"},{"db":"Office of Rare Diseases","id":"5442","status":"CURRENT"}]},{"elementValue":{"value":"USH3A","type":"Alternate"},"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"},{"db":"Office of Rare Diseases","id":"5442","status":"CURRENT"}]}],"attributeSet":[{"attribute":{"value":"Neonatal/infancy","type":"age of onset"},"xref":[{"db":"Orphanet","id":"886","status":"CURRENT"},{"db":"Orphanet","id":"231183","status":"CURRENT"}]}],"citation":[{"id":[{"value":"21697857","source":"PubMed"}],"type":"Translational/Evidence-based","abbrev":"EuroGenetest, 
2011"}],"type":"Disease","id":5092,"xref":[{"db":"MedGen","id":"C1568248","status":"CURRENT"},{"db":"Orphanet","id":"231183","status":"CURRENT"},{"db":"Orphanet","id":"886","status":"CURRENT"},{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"}]}],"type":"Disease","id":1209}, "dateCreated":1344812400000, "dateLastUpdated":1435359600000, "id":62145}, "clinVarAssertion":[{"clinVarSubmissionID":{"submitter":"OMIM","title":"CLRN1, TYR176TER_USHER SYNDROME, TYPE IIIA","localKey":"606397.0001_USHER SYNDROME, TYPE IIIA","submitterDate":1435100400000},"clinVarAccession":{"acc":"SCV000024816","version":2,"type":"SCV","orgID":3,"dateUpdated":1435359600000},"recordStatus":"current","clinicalSignificance":{"reviewStatus":"NO_ASSERTION_CRITERIA_PROVIDED","description":["Pathogenic"],"dateLastEvaluated":1435100400000},"assertion":{"type":"variation to disease"},"externalID":{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"},"observedIn":[{"sample":{"origin":"germline","species":{"value":"human"},"affectedStatus":"not provided"},"method":[{"methodType":"LITERATURE_ONLY"}],"observedData":[{"attribute":{"value":"Fields et al. (2002) demonstrated that the Fin(major) USH3A mutation in exon 3 of the USH3A gene, which had been identified by Joensuu et al. (2001) as 300C-T (TYR100TER), should be referred to as 528T-G, resulting in a tyr176-to-ter substitution. Joensuu et al. (2001) had identified homozygosity for this mutation in a Finnish family segregating Usher syndrome type IIIA (USH3A; 276902) and found it in a further 52 Finnish patients. Fields et al. 
(2002) found this mutation in 11 of 28 mutated alleles from affected individuals of Finnish and other northern European ancestry.","type":"Description"},"citation":[{"id":[{"value":"12145752","source":"PubMed"}]},{"id":[{"value":"11524702","source":"PubMed"}]}],"xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"}]}]}],"measureSet":{"measure":[{"name":[{"elementValue":{"value":"CLRN1, TYR176TER","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"TYR176TER","type":"NonHGVS"}}],"measureRelationship":[{"symbol":[{"elementValue":{"value":"CLRN1","type":"Preferred"}}],"type":"variant in gene"}],"type":"Variation","xref":[{"db":"OMIM","id":"606397.0001","type":"Allelic variant","status":"CURRENT"}]}],"type":"Variant"},"traitSet":{"trait":[{"name":[{"elementValue":{"value":"USHER SYNDROME, TYPE IIIA","type":"Preferred"}}],"type":"Disease"}],"type":"Disease"},"id":24816},{"clinVarSubmissionID":{"submitter":"Laboratory for Molecular Medicine,Partners HealthCare Personalized Medicine","localKey":"11483565|OMIM:276902","submitterDate":1422489600000},"clinVarAccession":{"acc":"SCV000203992","version":1,"type":"SCV","orgID":21766,"dateUpdated":1422576000000},"recordStatus":"current","clinicalSignificance":{"reviewStatus":"CLASSIFIED_BY_SINGLE_SUBMITTER","description":["Pathogenic"],"citation":[{"id":[{"value":"11524702","source":"PubMed"}]}],"comment":[{"value":"The Tyr176X variant in CLRN1 has been previously identified in 52 homozygous and 2 compound heterozygous individuals with Usher syndrome type III (Joensuu 2001). This variant has been identified in 1/8,600 European American chromosomes by the NHLBI Exome Sequencing Project (http://evs.gs.washington.edu/EVS/; dbSNP rs121908140). Although this variant has been seen in the general population, its frequency is low enough to be consistent with a recessive carrier frequency. 
This nonsense variant leads to a premature termination codon at position 176, which is predicted to lead to a truncated or absent protein. In summary, this variant meets our criteria to be classified as pathogenic in a recessive manner for Usher syndrome (http://pcpgm.partners.org/LMM)."}],"dateLastEvaluated":1388620800000},"assertion":{"type":"variation to disease"},"externalID":{"db":"Laboratory for Molecular Medicine (Partners HealthCare Personalized Medicine)","id":"11483565","status":"CURRENT"},"attributeSet":[{"attribute":{"value":"Autosomal recessive inheritance","type":"ModeOfInheritance"}}],"observedIn":[{"sample":{"origin":"germline","species":{"value":"human","taxonomyId":9606},"affectedStatus":"not provided","familyData":{"numFamiliesWithVariant":2}},"method":[{"methodType":"CLINICAL_TESTING"}],"observedData":[{"attribute":{"integerValue":3,"type":"VariantAlleles"}}]}],"measureSet":{"measure":[{"name":[{"elementValue":{"value":"NM_174878.2:c.528T>G","type":"Alternate"}},{"elementValue":{"value":"p.Tyr176X","type":"Alternate"}}],"attributeSet":[{"attribute":{"value":"NM_174878.2:EXON 3","type":"Location"}},{"attribute":{"value":"NC_000003.11:g.150645894A>C","type":"HGVS"}}],"sequenceLocation":[{"assembly":"GRCh37","chr":"3","start":150645894,"stop":150645894,"variantLength":1,"referenceAllele":"A","alternateAllele":"C"}],"measureRelationship":[{"symbol":[{"elementValue":{"value":"CLRN1","type":"Preferred"}}],"type":"variant in gene"}],"type":"Variation","xref":[{"db":"dbSNP","id":"121908140","type":"rsNumber","status":"CURRENT"}]}],"type":"Variant"},"traitSet":{"trait":[{"name":[{"elementValue":{"value":"Usher syndrome, type 3A","type":"Preferred"}}],"type":"Disease","xref":[{"db":"OMIM","id":"276902","type":"MIM","status":"CURRENT"}]}],"type":"Disease"},"submissionName":"LMM_all.variants_NCBI_3.16.2013","id":366075}], "id":6973966})
report = clinvar_to_evidence_strings.Report()
trait = SimpleNamespace()
trait.trait_counter = 0
trait.clinvar_name = ""
trait.ontology_id = 'http://www.orpha.net/ORDO/Orphanet_88991'
trait.ontology_label = None
consequence_type = test_clinvar_to_evidence_strings.MAPPINGS.consequence_type_dict["rs121908140"][0]
test_args_1 = (clinvarRecord, clinvarRecord.measures[0], report, trait, consequence_type)
return test_args_1
# TODO: investigate why these tests failed on Travis CI
class CTTVGeneticsEvidenceStringInitTest(unittest.TestCase):
    """Checks that a genetics evidence string built from the Usher syndrome
    test record (RCV000004642 / rs121908140) matches a hand-written expected
    payload, section by section and then as a whole."""

    maxDiff = None

    def setUp(self):
        self.test_args = get_args_CTTVGeneticsEvidenceString_init()
        self.evidence_string = evidence_strings.CTTVGeneticsEvidenceString(*self.test_args)

    def test_evidence_string(self):
        # Expected payload, written out verbatim.
        expected_dict = {
            "literature": {
                "references": [
                    {"lit_id": "http://europepmc.org/abstract/MED/11524702"},
                    {"lit_id": "http://europepmc.org/abstract/MED/12145752"},
                    {"lit_id": "http://europepmc.org/abstract/MED/21697857"}]
            },
            "disease": {"id": ["http://www.orpha.net/ORDO/Orphanet_886"]},
            "validated_against_schema_version": "1.6.2",
            "target": {
                "target_type": "http://identifiers.org/cttv.target/gene_variant",
                "id": "http://identifiers.org/ensembl/ENSG00000163646",
                "activity": "http://identifiers.org/cttv.activity/unknown"
            },
            "sourceID": "eva",
            "evidence": {
                "gene2variant": {
                    "is_associated": True,
                    "provenance_type": {
                        "expert": {"status": True, "statement": "Primary submitter of data"},
                        "database": {
                            "id": "EVA",
                            "dbxref": {
                                "url": "http://identifiers.org/clinvar.record/RCV000004642",
                                "id": "http://identifiers.org/clinvar",
                                "version": "2017-08"
                            },
                            "version": "1.0"
                        }
                    },
                    "evidence_codes": ["http://identifiers.org/eco/cttv_mapping_pipeline"],
                    "date_asserted": "2015-06-26T23:00:00",
                    "functional_consequence": "http://purl.obolibrary.org/obo/SO_0001587",
                    "urls": [{"url": "http://www.ncbi.nlm.nih.gov/clinvar/RCV000004642",
                              "nice_name": "Further details in ClinVar database"}],
                    "resource_score": {
                        "type": "pvalue",
                        "method": {"description": "Not provided by data supplier"},
                        "value": 1e-07
                    }
                },
                "variant2disease": {
                    "is_associated": True,
                    "clinical_significance": "Pathogenic",
                    "provenance_type": {
                        "literature": {
                            "references": [
                                {"lit_id": "http://europepmc.org/abstract/MED/11524702"},
                                {"lit_id": "http://europepmc.org/abstract/MED/12145752"},
                                {"lit_id": "http://europepmc.org/abstract/MED/21697857"}]
                        },
                        "expert": {"status": True, "statement": "Primary submitter of data"},
                        "database": {
                            "id": "EVA",
                            "dbxref": {
                                "url": "http://identifiers.org/clinvar.record/RCV000004642",
                                "id": "http://identifiers.org/clinvar",
                                "version": "2017-08"
                            },
                            "version": "1.0"
                        }
                    },
                    "evidence_codes": ["http://purl.obolibrary.org/obo/ECO_0000205"],
                    "date_asserted": "2015-06-26T23:00:00",
                    "unique_experiment_reference": "http://europepmc.org/abstract/MED/11524702",
                    "urls": [{
                        "url": "http://www.ncbi.nlm.nih.gov/clinvar/RCV000004642",
                        "nice_name": "Further details in ClinVar database"
                    }],
                    "resource_score": {
                        "type": "pvalue",
                        "method": {"description": "Not provided by data supplier"},
                        "value": 1e-07
                    }
                }
            },
            "type": "genetic_association",
            "access_level": "public",
            "unique_association_fields": {
                "gene": "ENSG00000163646",
                "alleleOrigin": "germline",
                "phenotype": "http://www.orpha.net/ORDO/Orphanet_886",
                "clinvarAccession": "RCV000004642",
                "variant_id": "rs121908140"
            },
            "variant": {"type": "snp single", "id": "http://identifiers.org/dbsnp/rs121908140"}
        }
        expected = evidence_strings.CTTVEvidenceString(expected_dict, trait=self.test_args[3])
        # Compare top-level sections one by one first, so a mismatch is
        # reported against a small sub-structure instead of the whole string.
        for section in ('access_level', 'evidence', 'literature', 'disease',
                        'sourceID', 'unique_association_fields', 'target',
                        'type', 'variant', 'validated_against_schema_version'):
            self.assertEqual(self.evidence_string[section], expected[section])
        self.assertEqual(self.evidence_string, expected)
def get_args_CTTVSomaticEvidenceString_init():
    """Assemble the positional arguments used to construct a CTTVSomaticEvidenceString.

    Returns a 5-tuple (clinvar_record, measure, report, trait, consequence_type)
    matching the argument order used when instantiating the evidence string in
    the tests below.
    """
    # Full ClinVar record RCV000000851: APC c.4391_4394delAGAG (p.Glu1464Valfs),
    # a somatic 4-bp deletion reported in a periampullary adenoma.
    clinvarRecord = clinvar.ClinvarRecord({"recordStatus": "current", "title": "NM_000038.5(APC):c.4391_4394delAGAG (p.Glu1464Valfs) AND Periampullary adenoma", "referenceClinVarAssertion":{"clinVarAccession":{"acc": "RCV000000851", "version":4, "type": "RCV", "dateUpdated":1455667200000}, "recordStatus": "current", "clinicalSignificance":{"reviewStatus": "NO_ASSERTION_CRITERIA_PROVIDED", "description": "Pathogenic", "dateLastEvaluated":752112000000}, "assertion":{"type": "VARIATION_TO_DISEASE"}, "observedIn":[{"sample":{"origin": "somatic", "species":{"value":"human", "taxonomyId":9606}, "affectedStatus":"not provided"}, "method":[{"methodType":"LITERATURE_ONLY"}], "observedData":[{"attribute":{"value":"In tumor tissue of a periampullary adenoma from a patient with FAP (175100), Bapat et al. (1993) identified a somatic 4-bp deletion (AGAG) at codon 1464 of the APC gene. The patient had a germline APC mutation (611731.0023).","type":"Description"},"citation":[{"id":[{"value":"8281160","source":"PubMed"}],"type":"general"}],"id":9728450}]}], "measureSet":{"measure":[{"name":[{"elementValue":{"value":"NM_000038.5(APC):c.4391_4394delAGAG (p.Glu1464Valfs)","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"NM_001127511.2:c.4337_4340delAGAG","type":"HGVS, coding, RefSeq","change":"c.4337_4340delAGAG"}},{"attribute":{"value":"NM_000038.5:c.4391_4394delAGAG","type":"HGVS, coding, RefSeq","change":"c.4391_4394delAGAG"}},{"attribute":{"value":"NM_001127510.2:c.4391_4394delAGAG","type":"HGVS, coding, RefSeq","change":"c.4391_4394delAGAG"}},{"attribute":{"value":"LRG_130:g.152465_152468delAGAG","type":"HGVS, genomic, LRG","change":"g.152465_152468delAGAG"}},{"attribute":{"value":"NG_008481.4:g.152465_152468delAGAG","type":"HGVS, genomic, RefSeqGene","change":"g.152465_152468delAGAG"},"citation":[{"id":[{"value":"8281160","source":"PubMed"}],"type":"general"}]},{"attribute":{"value":"NC_000005.10:g.112839985_112839988delAGAG","integerValue":38,"type":"HGVS, genomic, top level","change":"g.112839985_112839988delAGAG"}},{"attribute":{"value":"NC_000005.9:g.112175682_112175685delAGAG","integerValue":37,"type":"HGVS, genomic, top level, previous","change":"g.112175682_112175685delAGAG"}},{"attribute":{"value":"NM_000038.4:c.4391_4394delAGAG","type":"HGVS, previous","change":"c.4391_4394delAGAG"}},{"attribute":{"value":"LRG_130p1:p.Glu1464Valfs","type":"HGVS, protein","change":"p.Glu1464Valfs"}},{"attribute":{"value":"LRG_130p2:p.Glu1464Valfs","type":"HGVS, protein","change":"p.Glu1464Valfs"}},{"attribute":{"value":"NP_001120983.2:p.Glu1446Valfs","type":"HGVS, protein, RefSeq","change":"p.Glu1446Valfs"}},{"attribute":{"value":"NP_000029.2:p.Glu1464Valfs","type":"HGVS, protein, RefSeq","change":"p.Glu1464Valfs"}},{"attribute":{"value":"NP_001120982.1:p.Glu1464Valfs","type":"HGVS, protein, RefSeq","change":"p.Glu1464Valfs"}},{"attribute":{"value":"frameshift variant","type":"MolecularConsequence"},"xref":[{"db":"Sequence Ontology","id":"SO:0001589","status":"CURRENT"},{"db":"RefSeq","id":"NM_000038.5:c.4391_4394delAGAG","status":"CURRENT"}]}],"cytogeneticLocation":["5q22.2"],"sequenceLocation":[{"assembly":"GRCh38","chr":"5","accession":"NC_000005.10","start":112839985,"stop":112839988,"displayStart":112839985,"displayStop":112839988,"variantLength":4,"referenceAllele":"AGAG","alternateAllele":"-","assemblyAccessionVersion":"GCF_000001405.28","assemblyStatus":"current"},{"assembly":"GRCh37","chr":"5","accession":"NC_000005.9","start":112175682,"stop":112175685,"displayStart":112175682,"displayStop":112175685,"variantLength":4,"referenceAllele":"AGAG","alternateAllele":"-","assemblyAccessionVersion":"GCF_000001405.25","assemblyStatus":"previous"}],"measureRelationship":[{"name":[{"elementValue":{"value":"adenomatous polyposis coli","type":"Preferred"}}],"symbol":[{"elementValue":{"value":"APC","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"Sufficient evidence for dosage pathogenicity","dateValue":1341529200000,"type":"Haploinsufficiency"},"citation":[{"url":"http://www.ncbi.nlm.nih.gov/projects/dbvar/ISCA/isca_gene.cgi?sym=APC"}]},{"attribute":{"value":"No evidence available","dateValue":1341529200000,"type":"Triplosensitivity"},"citation":[{"url":"http://www.ncbi.nlm.nih.gov/projects/dbvar/ISCA/isca_gene.cgi?sym=APC"}]}],"sequenceLocation":[{"assembly":"GRCh38","chr":"5","accession":"NC_000005.10","start":112707504,"stop":112846238,"displayStart":112707504,"displayStop":112846238,"strand":"+","variantLength":138735,"assemblyAccessionVersion":"GCF_000001405.28","assemblyStatus":"current"},{"assembly":"GRCh37","chr":"5","accession":"NC_000005.9","start":112043201,"stop":112181935,"displayStart":112043201,"displayStop":112181935,"strand":"+","variantLength":138735,"assemblyAccessionVersion":"GCF_000001405.25","assemblyStatus":"previous"}],"comment":[{"value":"This gene is cited in the ACMG recommendations of 2013 (PubMed 23788249) for reporting incidental findings in exons.","dataSource":"NCBI curation","type":"PUBLIC"}],"type":"variant in gene","xref":[{"db":"Gene","id":"324","status":"CURRENT"},{"db":"OMIM","id":"611731","type":"MIM","status":"CURRENT"},{"db":"HGNC","id":"HGNC:583","status":"CURRENT"}]}],"type":"Deletion","id":15851,"xref":[{"db":"OMIM","id":"611731.0020","type":"Allelic variant","status":"CURRENT"},{"db":"dbSNP","id":"387906235","type":"rs","status":"CURRENT"}]}],"name":[{"elementValue":{"value":"NM_000038.5(APC):c.4391_4394delAGAG (p.Glu1464Valfs)","type":"Preferred"}}],"type":"Variant","id":812}, "traitSet":{"trait":[{"name":[{"elementValue":{"value":"Periampullary adenoma","type":"Preferred"}},{"elementValue":{"value":"EFO:0000232","type":"EFO id"}},{"elementValue":{"value":"adenoma","type":"EFO name"}},{"elementValue":{"value":"http://www.ebi.ac.uk/efo/EFO_0000232","type":"EFO URL"}}],"attributeSet":[{"attribute":{"value":"Neoplasm","type":"keyword"}}],"type":"Disease","id":9669,"xref":[{"db":"MedGen","id":"CN068444","status":"CURRENT"}]}],"type":"Disease","id":210}, "dateCreated":1344812400000, "dateLastUpdated":1455667200000, "id":58354}, "clinVarAssertion":[{"clinVarSubmissionID":{"submitter":"OMIM","title":"APC, 4-BP DEL, CODON 1464_ADENOMA, PERIAMPULLARY, SOMATIC","localKey":"611731.0020_ADENOMA, PERIAMPULLARY, SOMATIC","submitterDate":1365030000000},"clinVarAccession":{"acc":"SCV000021001","version":1,"type":"SCV","orgID":3,"dateUpdated":1444518000000},"recordStatus":"current","clinicalSignificance":{"reviewStatus":"NO_ASSERTION_CRITERIA_PROVIDED","description":["Pathogenic"],"dateLastEvaluated":752112000000},"assertion":{"type":"variation to disease"},"externalID":{"db":"OMIM","id":"611731.0020","type":"Allelic variant","status":"CURRENT"},"observedIn":[{"sample":{"origin":"somatic","species":{"value":"human"},"affectedStatus":"not provided"},"method":[{"methodType":"LITERATURE_ONLY"}],"observedData":[{"attribute":{"value":"In tumor tissue of a periampullary adenoma from a patient with FAP (175100), Bapat et al. (1993) identified a somatic 4-bp deletion (AGAG) at codon 1464 of the APC gene. The patient had a germline APC mutation (611731.0023).","type":"Description"},"citation":[{"id":[{"value":"8281160","source":"PubMed"}]}],"xref":[{"db":"OMIM","id":"175100","type":"MIM","status":"CURRENT"}]}]}],"measureSet":{"measure":[{"name":[{"elementValue":{"value":"APC, 4-BP DEL, CODON 1464","type":"Preferred"}}],"attributeSet":[{"attribute":{"value":"4-BP DEL, CODON 1464","type":"NonHGVS"}}],"measureRelationship":[{"symbol":[{"elementValue":{"value":"APC","type":"Preferred"}}],"type":"variant in gene"}],"type":"Variation","xref":[{"db":"OMIM","id":"611731.0020","type":"Allelic variant","status":"CURRENT"}]}],"type":"Variant"},"traitSet":{"trait":[{"name":[{"elementValue":{"value":"ADENOMA, PERIAMPULLARY, SOMATIC","type":"Preferred"}}],"type":"Disease"}],"type":"Disease"},"id":21001}], "id":10311588})
    report = clinvar_to_evidence_strings.Report()
    # Minimal stand-in for a trait object: only the attributes read by the
    # evidence-string builder are populated.
    trait = SimpleNamespace()
    trait.trait_counter = 0
    trait.clinvar_name = ""
    trait.ontology_id = 'http://www.ebi.ac.uk/efo/EFO_0003840'
    trait.ontology_label = None
    # First consequence mapped to rs387906235 in the test mapping fixture.
    consequence_type = test_clinvar_to_evidence_strings.MAPPINGS.consequence_type_dict["rs387906235"][0]
    test_args_1 = (clinvarRecord, clinvarRecord.measures[0], report, trait, consequence_type)
    return test_args_1
class CTTVSomaticEvidenceStringInitTest(unittest.TestCase):
    """Checks that a somatic evidence string built from the APC test record
    (RCV000000851 / rs387906235) matches a hand-written expected payload,
    section by section and then as a whole."""

    def setUp(self):
        self.test_args = get_args_CTTVSomaticEvidenceString_init()
        self.evidence_string = evidence_strings.CTTVSomaticEvidenceString(*self.test_args)

    def test_evidence_string(self):
        # Expected payload, written out verbatim.
        expected_dict = {
            "literature": {"references": [{"lit_id": "http://europepmc.org/abstract/MED/8281160"}]},
            "disease": {"id": ["http://www.ebi.ac.uk/efo/EFO_0000232"]},
            "validated_against_schema_version": "1.6.2",
            "target": {
                "target_type": "http://identifiers.org/cttv.target/gene_variant",
                "id": "http://identifiers.org/ensembl/ENSG00000134982",
                "activity": "http://identifiers.org/cttv.activity/unknown"
            },
            "sourceID": "eva_somatic",
            "type": "somatic_mutation",
            "access_level": "public",
            "unique_association_fields": {
                "gene": "ENSG00000134982",
                "alleleOrigin": "somatic",
                "phenotype": "http://www.ebi.ac.uk/efo/EFO_0000232",
                "clinvarAccession": "RCV000000851",
                "variant_id": "rs387906235"
            },
            "evidence": {
                "is_associated": True,
                "clinical_significance": "Pathogenic",
                "provenance_type": {
                    "literature": {"references": [{"lit_id": "http://europepmc.org/abstract/MED/8281160"}]},
                    "expert": {"status": True, "statement": "Primary submitter of data"},
                    "database": {
                        "id": "EVA",
                        "dbxref": {
                            "url": "http://identifiers.org/clinvar.record/RCV000000851",
                            "id": "http://identifiers.org/clinvar",
                            "version": "2017-08"
                        },
                        "version": "1.0"
                    }
                },
                "evidence_codes": ["http://purl.obolibrary.org/obo/ECO_0000205"],
                "date_asserted": "2016-02-17T00:00:00",
                "urls": [{
                    "url": "http://www.ncbi.nlm.nih.gov/clinvar/RCV000000851",
                    "nice_name": "Further details in ClinVar database"
                }],
                "known_mutations": [{
                    "preferred_name": "frameshift_variant",
                    "functional_consequence": "http://purl.obolibrary.org/obo/SO_0001589"
                }],
                "resource_score": {"type": "probability", "value": 1}
            }
        }
        expected = evidence_strings.CTTVEvidenceString(expected_dict, trait=self.test_args[3])
        # Compare top-level sections one by one first, so a mismatch is
        # reported against a small sub-structure instead of the whole string.
        for section in ('access_level', 'evidence', 'literature', 'disease',
                        'sourceID', 'unique_association_fields', 'target',
                        'type', 'validated_against_schema_version'):
            self.assertEqual(self.evidence_string[section], expected[section])
        self.assertEqual(self.evidence_string, expected)
class GetCTTVVariantTypeTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
crm = SimpleNamespace()
crm.ref = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACG"
crm.alt = "C"
record_single_a = (crm, "snp single")
crm = SimpleNamespace()
crm.ref = "A"
crm.alt = "C"
record_single_b = (crm, "snp single")
record_single_c = SimpleNamespace()
record_single_c.ref = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACG"
record_single_c.alt = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACG"
record_single_c = (crm, "snp single")
cls.test_records_singles = [record_single_a, record_single_b, record_single_c]
crm = SimpleNamespace()
crm.ref = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT"
crm.alt = "C"
record_structurals_a = (crm, "structural variant")
crm = SimpleNamespace()
crm.ref = "A"
crm.alt = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT"
record_structurals_b = (crm, "structural variant")
record_single_c = SimpleNamespace()
record_single_c.ref = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT"
record_single_c.alt = "AGAGACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT"
record_structurals_c = (crm, "structural variant")
cls.test_records_structurals = \
[record_structurals_a, record_structurals_b, record_structurals_c]
def test_get_cttv_variant_type_singles(self):
for record in self.test_records_singles:
self.assertEqual(evidence_strings.get_cttv_variant_type(record[0]), record[1])
def test_get_cttv_variant_type_structurals(self):
for record in self.test_records_structurals:
self.assertEqual(evidence_strings.get_cttv_variant_type(record[0]), record[1])
class CTTVGeneticsEvidenceStringTest(unittest.TestCase):
    """Unit tests for the individual setters/properties of
    CTTVGeneticsEvidenceString, plus JSON-schema validation of a fully
    built evidence string."""

    def setUp(self):
        self.test_args = get_args_CTTVGeneticsEvidenceString_init()
        self.test_ges = evidence_strings.CTTVGeneticsEvidenceString(*self.test_args)
        # Gzipped Open Targets JSON schema (v1.6.2) bundled with the test
        # resources; used by test_validate below.
        # NOTE(review): the gzip file handle is never closed — consider a
        # `with` block here.
        ot_schema_path = os.path.join(
            os.path.dirname(__file__), 'resources', 'opentargets.1.6.2.json.gz')
        self.ot_schema_contents = json.loads(gzip.open(ot_schema_path).read().decode('utf-8'))

    # --- Behaviour inherited from CTTVEvidenceString ---

    def test_unique_association_field(self):
        # Each call should add (or overwrite) exactly one key under
        # 'unique_association_fields'.
        uaf_1 = ("gene", "test_gene")
        uaf_2 = ("clinvarAccession", "test_clinvar")
        uaf_3 = ("alleleOrigin", "germline")
        uaf_4 = ("phenotype", "test_phenotype")
        uaf_5 = ("variant_id", "test_rs")
        self.test_ges.add_unique_association_field(*uaf_1)
        self.assertEqual(self.test_ges['unique_association_fields'][uaf_1[0]], uaf_1[1])
        self.test_ges.add_unique_association_field(*uaf_2)
        self.assertEqual(self.test_ges['unique_association_fields'][uaf_2[0]], uaf_2[1])
        self.test_ges.add_unique_association_field(*uaf_3)
        self.assertEqual(self.test_ges['unique_association_fields'][uaf_3[0]], uaf_3[1])
        self.test_ges.add_unique_association_field(*uaf_4)
        self.assertEqual(self.test_ges['unique_association_fields'][uaf_4[0]], uaf_4[1])
        self.test_ges.add_unique_association_field(*uaf_5)
        self.assertEqual(self.test_ges['unique_association_fields'][uaf_5[0]], uaf_5[1])

    def test_set_target(self):
        # set_target should populate both the id and activity of 'target'
        # after the existing target has been cleared.
        target = ("http://identifiers.org/ensembl/ENSG00000135486",
                  "http://identifiers.org/cttv.activity/predicted_damaging")
        self.test_ges._clear_target()
        self.test_ges.set_target(*target)
        self.assertEqual(self.test_ges['target']['id'], target[0])
        self.assertEqual(self.test_ges['target']['activity'], target[1])

    def test_disease(self):
        # NOTE(review): this test assigns disease_id but asserts nothing, so it
        # only verifies that the assignment does not raise. Consider asserting
        # the resulting value once the property's read-back behaviour is
        # confirmed.
        disease_id = "Ciliary dyskinesia, primary, 26"
        self.test_ges.disease_id = disease_id

    def test_evidence_codes(self):
        # The property setter should mirror into the underlying dict.
        evidence_codes = ["http://purl.obolibrary.org/obo/ECO_0000205"]
        self.test_ges.evidence_codes = evidence_codes
        self.assertEqual(self.test_ges['evidence']['evidence_codes'], evidence_codes)
        self.assertEqual(self.test_ges.evidence_codes, evidence_codes)

    def test_top_level_literature(self):
        # Plain literature URLs should be wrapped as {"lit_id": ...} dicts.
        literature = ["http://europepmc.org/abstract/MED/20301537"]
        self.test_ges.top_level_literature = literature
        self.assertEqual(self.test_ges['literature']['references'],
                         [{"lit_id": literature_id} for literature_id in literature])
        self.assertEqual(self.test_ges.top_level_literature,
                         [{"lit_id": literature_id} for literature_id in literature])

    # --- Behaviour specific to CTTVGeneticsEvidenceString ---

    def test_db_xref_url(self):
        # A single assignment should propagate to both the gene2variant and
        # variant2disease provenance sections.
        url = "http://identifiers.org/clinvar.record/RCV000128628"
        self.test_ges.db_xref_url = url
        self.assertEqual(
            self.test_ges['evidence']['gene2variant']['provenance_type']['database']['dbxref']['url'],
            url)
        self.assertEqual(
            self.test_ges['evidence']['variant2disease']['provenance_type']['database']['dbxref']['url'],
            url)
        self.assertEqual(self.test_ges.db_xref_url, url)

    def test_url(self):
        # A single assignment should propagate to both evidence sub-sections.
        url = "http://www.ncbi.nlm.nih.gov/clinvar/RCV000128628"
        self.test_ges.url = url
        self.assertEqual(self.test_ges['evidence']['gene2variant']['urls'][0]['url'], url)
        self.assertEqual(self.test_ges['evidence']['variant2disease']['urls'][0]['url'], url)
        self.assertEqual(self.test_ges.url, url)

    def test_gene_2_var_ev_codes(self):
        ev_codes = ['http://identifiers.org/eco/cttv_mapping_pipeline']
        self.test_ges.gene_2_var_ev_codes = ev_codes
        self.assertEqual(self.test_ges['evidence']['gene2variant']['evidence_codes'], ev_codes)
        self.assertEqual(self.test_ges.gene_2_var_ev_codes, ev_codes)

    def test_gene_2_var_func_consequence(self):
        functional_consequence = 'http://purl.obolibrary.org/obo/SO_0001583'
        self.test_ges.gene_2_var_func_consequence = functional_consequence
        self.assertEqual(self.test_ges['evidence']['gene2variant']['functional_consequence'],
                         functional_consequence)
        self.assertEqual(self.test_ges.gene_2_var_func_consequence, functional_consequence)

    def test_set_var_2_disease_literature_a(self):
        # Setting literature after the section was emptied: the setter should
        # (re)create the 'references' list, and a subsequent call should
        # replace it entirely rather than append.
        self.test_ges['evidence']['variant2disease']['provenance_type']['literature'] = {}
        literature_1 = "PMCID12345"
        self.test_ges.set_var_2_disease_literature([literature_1])
        self.assertEqual(
            self.test_ges['evidence']['variant2disease']['provenance_type']['literature']['references'],
            [{"lit_id": literature_1}])
        literature_2 = "PMCID9876"
        literature_3 = "PMCID7654"
        literature_list = [literature_2, literature_3]
        self.test_ges.set_var_2_disease_literature(literature_list)
        self.assertEqual(
            self.test_ges['evidence']['variant2disease']['provenance_type']['literature']['references'],
            [{"lit_id": literature_id} for literature_id in literature_list])

    def test_set_var_2_disease_literature_b(self):
        # Same as _a but without pre-emptying the literature section, i.e.
        # overwriting whatever the constructor populated.
        literature_1 = "PMCID12345"
        self.test_ges.set_var_2_disease_literature([literature_1])
        self.assertEqual(
            self.test_ges['evidence']['variant2disease']['provenance_type']['literature']['references'],
            [{"lit_id": literature_1}])
        literature_2 = "PMCID9876"
        literature_3 = "PMCID7654"
        literature_list = [literature_2, literature_3]
        self.test_ges.set_var_2_disease_literature(literature_list)
        self.assertEqual(
            self.test_ges['evidence']['variant2disease']['provenance_type']['literature']['references'],
            [{"lit_id": literature_id} for literature_id in literature_list])

    def test_association(self):
        # The association flag should mirror into both evidence sub-sections,
        # in both the True and False directions.
        self.test_ges.association = True
        self.assertTrue(self.test_ges['evidence']['gene2variant']['is_associated'])
        self.assertTrue(self.test_ges['evidence']['variant2disease']['is_associated'])
        self.assertTrue(self.test_ges.association)
        self.test_ges.association = False
        self.assertFalse(self.test_ges['evidence']['gene2variant']['is_associated'])
        self.assertFalse(self.test_ges['evidence']['variant2disease']['is_associated'])
        self.assertFalse(self.test_ges.association)

    def test_set_variant(self):
        test_id = "http://identifiers.org/dbsnp/rs193922494"
        test_type = "snp single"
        self.test_ges._clear_variant()
        self.test_ges.set_variant(test_id, test_type)
        self.assertEqual(self.test_ges['variant']['id'], test_id)
        self.assertEqual(self.test_ges['variant']['type'], test_type)

    def test_unique_reference(self):
        unique_reference = "http://europepmc.org/abstract/MED/0"
        self.test_ges.unique_reference = unique_reference
        self.assertEqual(
            self.test_ges['evidence']['variant2disease']['unique_experiment_reference'],
            unique_reference)
        self.assertEqual(self.test_ges.unique_reference, unique_reference)

    def test_date(self):
        # Timestamp is in milliseconds, hence the /1000 before conversion.
        date_string = datetime.fromtimestamp(1412982000000 / 1000).isoformat()
        self.test_ges.date = date_string
        self.assertEqual(self.test_ges['evidence']['gene2variant']['date_asserted'], date_string)
        self.assertEqual(self.test_ges['evidence']['variant2disease']['date_asserted'],
                         date_string)
        self.assertEqual(self.test_ges.date, date_string)

    def test_validate(self):
        # A freshly built evidence string should pass Open Targets schema
        # validation without any modification.
        test_args = get_args_CTTVGeneticsEvidenceString_init()
        test_evidence_string = evidence_strings.CTTVGeneticsEvidenceString(*test_args)
        self.assertTrue(test_evidence_string.validate(self.ot_schema_contents))
class CTTVSomaticEvidenceStringTest(unittest.TestCase):
    """Tests for the CTTVSomaticEvidenceString accessors and schema validation."""

    @classmethod
    def setUpClass(cls):
        # Loaded once for the class: the consequence-type mapping is read-only.
        cls.consequence_type_dict = CT.process_consequence_type_file(config.snp_2_gene_file)

    def setUp(self):
        test_args = get_args_CTTVSomaticEvidenceString_init()
        self.test_ses = evidence_strings.CTTVSomaticEvidenceString(*test_args)
        ot_schema_path = os.path.join(
            os.path.dirname(__file__), 'resources', 'opentargets.1.6.2.json.gz')
        # Fix: close the gzip handle deterministically (the original leaked it).
        with gzip.open(ot_schema_path) as schema_file:
            self.ot_schema_contents = json.loads(schema_file.read().decode('utf-8'))

    def test_db_xref_url(self):
        url = "http://identifiers.org/clinvar.record/RCV000128628"
        self.test_ses.db_xref_url = url
        self.assertEqual(self.test_ses['evidence']['provenance_type']['database']['dbxref']['url'],
                         url)
        self.assertEqual(self.test_ses.db_xref_url, url)

    def test_url(self):
        url = "http://www.ncbi.nlm.nih.gov/clinvar/RCV000128628"
        self.test_ses.url = url
        self.assertEqual(self.test_ses['evidence']['urls'][0]['url'], url)
        self.assertEqual(self.test_ses.url, url)

    def test_evidence_literature(self):
        # A single reference round-trips as a one-element list of dicts.
        literature_1 = "PMCID12345"
        self.test_ses.evidence_literature = [literature_1]
        self.assertEqual(self.test_ses['evidence']['provenance_type']['literature']['references'],
                         [{"lit_id": literature_1}])
        self.assertEqual(self.test_ses.evidence_literature, [{"lit_id": literature_1}])
        # Setting again replaces (not appends to) the reference list.
        literature_2 = "PMCID9876"
        literature_3 = "PMCID7654"
        literature_list = [literature_2, literature_3]
        self.test_ses.evidence_literature = literature_list
        self.assertEqual(self.test_ses['evidence']['provenance_type']['literature']['references'],
                         [{"lit_id": literature_id} for literature_id in literature_list])
        self.assertEqual(self.test_ses.evidence_literature,
                         [{"lit_id": literature_id} for literature_id in literature_list])

    def test_association(self):
        # Both True and False must propagate to 'is_associated' and read back.
        self.test_ses.association = True
        self.assertTrue(self.test_ses['evidence']['is_associated'])
        self.assertTrue(self.test_ses.association)
        self.test_ses.association = False
        self.assertFalse(self.test_ses['evidence']['is_associated'])
        self.assertFalse(self.test_ses.association)

    def test_date(self):
        date_string = datetime.fromtimestamp(1412982000000 / 1000).isoformat()
        self.test_ses.date = date_string
        self.assertEqual(self.test_ses['evidence']['date_asserted'], date_string)
        self.assertEqual(self.test_ses.date, date_string)

    def test_add_known_mutations(self):
        functional_consequence = "http://purl.obolibrary.org/obo/SO_0001791"
        preferred_name = "exon_variant"
        self.test_ses._clear_known_mutations()
        self.test_ses.add_known_mutation(functional_consequence, preferred_name)
        self.assertEqual(
            self.test_ses['evidence']['known_mutations'],
            [{'functional_consequence': functional_consequence, 'preferred_name': preferred_name}])

    def test_set_known_mutations(self):
        test_consequence_type = CT.ConsequenceType("ENSG00000008710",
                                                   CT.SoTerm("3_prime_UTR_variant"))
        self.test_ses._clear_known_mutations()
        self.test_ses.set_known_mutations(test_consequence_type.so_term)
        self.assertEqual(
            self.test_ses['evidence']['known_mutations'],
            [{'functional_consequence': 'http://purl.obolibrary.org/obo/SO_0001624',
              'preferred_name': '3_prime_UTR_variant'}])

    def test_validate(self):
        test_args = get_args_CTTVSomaticEvidenceString_init()
        test_evidence_string = evidence_strings.CTTVSomaticEvidenceString(*test_args)
        self.assertTrue(test_evidence_string.validate(self.ot_schema_contents))
| 114.086792
| 14,292
| 0.683326
| 6,971
| 60,466
| 5.788266
| 0.099125
| 0.021611
| 0.030607
| 0.025081
| 0.87938
| 0.861512
| 0.827038
| 0.802032
| 0.756456
| 0.722825
| 0
| 0.084262
| 0.102057
| 60,466
| 529
| 14,293
| 114.302457
| 0.6589
| 0.001125
| 0
| 0.47191
| 0
| 0.029213
| 0.53883
| 0.059561
| 0
| 0
| 0
| 0.00189
| 0.193258
| 1
| 0.083146
| false
| 0
| 0.026966
| 0.002247
| 0.130337
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
484c519ddf57a4e9299152f6c585f0dfabbf8671
| 1,185
|
py
|
Python
|
source/cpu/utils/results.py
|
NikitaMishin/semilocal
|
10e52d2ed051056560ef29fe5a88a7622e4e3f02
|
[
"Apache-2.0"
] | null | null | null |
source/cpu/utils/results.py
|
NikitaMishin/semilocal
|
10e52d2ed051056560ef29fe5a88a7622e4e3f02
|
[
"Apache-2.0"
] | 4
|
2021-03-27T16:25:26.000Z
|
2021-12-16T14:17:39.000Z
|
source/cpu/utils/results.py
|
NikitaMishin/semilocal
|
10e52d2ed051056560ef29fe5a88a7622e4e3f02
|
[
"Apache-2.0"
] | null | null | null |
class Result:
    """Marker base class for benchmark result records."""
class CombingResult(Result):
    """Result of a combing benchmark run over a pair of named inputs."""

    def __init__(self, elapsed_time_preprocess: int, elapsed_time_algo: int, hash: int, size_a: int, size_b: int,
                 name_a, name_b):
        # NOTE: `hash` shadows the builtin; name kept for caller compatibility.
        self.hash = hash
        self.name_a = name_a
        self.name_b = name_b
        self.size_a = size_a
        self.size_b = size_b
        self.elapsed_time_preprocess = elapsed_time_preprocess
        self.elapsed_time_algo = elapsed_time_algo
class BraidResult(Result):
    """Result of a braid benchmark run on a randomly generated input.

    Type hints added for consistency with CombingResult/LCSResult;
    `n` (input size) and `seed` are assumed integral — TODO confirm
    against callers.
    """

    def __init__(self, elapsed_time_preprocess: int, elapsed_time_algo: int, hash: int, n: int, seed: int):
        # NOTE: `hash` shadows the builtin; name kept for caller compatibility.
        self.hash = hash
        self.elapsed_time_algo = elapsed_time_algo
        self.elapsed_time_preprocess = elapsed_time_preprocess
        self.n = n
        self.seed = seed
class LCSResult(Result):
    """Result of an LCS benchmark run over a pair of named inputs."""

    def __init__(self, elapsed_time_preprocess: int, elapsed_time_algo: int, score: int, size_a: int, size_b: int,
                 name_a, name_b):
        self.score = score
        self.name_a = name_a
        self.name_b = name_b
        self.size_a = size_a
        self.size_b = size_b
        self.elapsed_time_preprocess = elapsed_time_preprocess
        self.elapsed_time_algo = elapsed_time_algo
| 32.916667
| 114
| 0.666667
| 167
| 1,185
| 4.299401
| 0.131737
| 0.275766
| 0.188022
| 0.208914
| 0.848189
| 0.848189
| 0.832869
| 0.779944
| 0.779944
| 0.779944
| 0
| 0
| 0.260759
| 1,185
| 35
| 115
| 33.857143
| 0.819635
| 0
| 0
| 0.62069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0.034483
| 0
| 0
| 0.241379
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
487c22bbb49928cbde03c68d51b77a900ed579ac
| 22,842
|
py
|
Python
|
tccli/services/oceanus/oceanus_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/oceanus/oceanus_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/oceanus/oceanus_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.oceanus.v20190422 import oceanus_client as oceanus_client_v20190422
from tencentcloud.oceanus.v20190422 import models as models_v20190422
def doStopJobs(args, parsed_globals):
    """Invoke the oceanus StopJobs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StopJobsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.StopJobs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteResources(args, parsed_globals):
    """Invoke the oceanus DeleteResources API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteResourcesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteResources(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateJob(args, parsed_globals):
    """Invoke the oceanus CreateJob API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateJobRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateJob(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateResourceConfig(args, parsed_globals):
    """Invoke the oceanus CreateResourceConfig API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateResourceConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateResourceConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourceRelatedJobs(args, parsed_globals):
    """Invoke the oceanus DescribeResourceRelatedJobs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeResourceRelatedJobsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeResourceRelatedJobs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRunJobs(args, parsed_globals):
    """Invoke the oceanus RunJobs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RunJobsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RunJobs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResources(args, parsed_globals):
    """Invoke the oceanus DescribeResources API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeResourcesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeResources(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeJobs(args, parsed_globals):
    """Invoke the oceanus DescribeJobs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeJobsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeJobs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateJobConfig(args, parsed_globals):
    """Invoke the oceanus CreateJobConfig API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateJobConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateJobConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteResourceConfigs(args, parsed_globals):
    """Invoke the oceanus DeleteResourceConfigs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteResourceConfigsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteResourceConfigs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateResource(args, parsed_globals):
    """Invoke the oceanus CreateResource API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSystemResources(args, parsed_globals):
    """Invoke the oceanus DescribeSystemResources API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSystemResourcesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeSystemResources(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTableConfig(args, parsed_globals):
    """Invoke the oceanus DeleteTableConfig API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteTableConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteTableConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeJobConfigs(args, parsed_globals):
    """Invoke the oceanus DescribeJobConfigs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeJobConfigsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeJobConfigs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourceConfigs(args, parsed_globals):
    """Invoke the oceanus DescribeResourceConfigs API with *args* and print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.OceanusClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeResourceConfigsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeResourceConfigs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # some SDK versions return bytes; decode first  # python3.3
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps an API version string to the SDK client module implementing it.
CLIENT_MAP = {
    "v20190422": oceanus_client_v20190422,
}

# Maps an API version string to the SDK models module for that version.
MODELS_MAP = {
    "v20190422": models_v20190422,
}

# Maps CLI command names to their handler functions (consumed by the
# dispatcher via action_caller()).
ACTION_MAP = {
    "StopJobs": doStopJobs,
    "DeleteResources": doDeleteResources,
    "CreateJob": doCreateJob,
    "CreateResourceConfig": doCreateResourceConfig,
    "DescribeResourceRelatedJobs": doDescribeResourceRelatedJobs,
    "RunJobs": doRunJobs,
    "DescribeResources": doDescribeResources,
    "DescribeJobs": doDescribeJobs,
    "CreateJobConfig": doCreateJobConfig,
    "DeleteResourceConfigs": doDeleteResourceConfigs,
    "CreateResource": doCreateResource,
    "DescribeSystemResources": doDescribeSystemResources,
    "DeleteTableConfig": doDeleteTableConfig,
    "DescribeJobConfigs": doDescribeJobConfigs,
    "DescribeResourceConfigs": doDescribeResourceConfigs,
}

# Versions accepted by parse_global_arg; anything else is rejected.
AVAILABLE_VERSION_LIST = [
    "v20190422",
]
def action_caller():
    """Return the command-name -> handler mapping used by the CLI dispatcher."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve global CLI options against profile files and the environment.

    Resolution order for each missing (None) option: credential/configure
    file of the selected profile, with environment variables taking over
    only when no explicit --profile was given.

    NOTE: mutates and returns *parsed_globals* — g_param is an alias,
    not a copy.

    Raises:
        ConfigurationError: malformed profile files, unresolvable options,
            or missing oceanus version/endpoint configuration.
        Exception: when the resolved API version is not supported.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        # No --profile given: fall back to "default" and allow env overrides.
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    if not is_exist_profile:
        # Environment variables override file values, but only as a pair:
        # secret id AND key must both be present.
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Fill each still-unset option from the credential or configure dict.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # Version string "2019-04-22" is normalised to the map key "v20190422".
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["oceanus"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["oceanus"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| 42.143911
| 105
| 0.721259
| 2,604
| 22,842
| 6.091398
| 0.065284
| 0.08057
| 0.223994
| 0.054092
| 0.801286
| 0.789623
| 0.785084
| 0.780545
| 0.774619
| 0.723238
| 0
| 0.010198
| 0.171439
| 22,842
| 541
| 106
| 42.221811
| 0.827909
| 0.007486
| 0
| 0.653846
| 0
| 0
| 0.041716
| 0.00415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036325
| false
| 0
| 0.025641
| 0.002137
| 0.066239
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fa3040e4859ca04f01125278d2c262c4d8cafe2
| 4,864
|
py
|
Python
|
app/users/test_users.py
|
Leo-G/T
|
fb85aa34e98f1f2126d960ceeafa59489d659bd2
|
[
"MIT"
] | null | null | null |
app/users/test_users.py
|
Leo-G/T
|
fb85aa34e98f1f2126d960ceeafa59489d659bd2
|
[
"MIT"
] | 1
|
2015-11-12T10:06:03.000Z
|
2015-11-12T10:06:03.000Z
|
app/users/test_users.py
|
Leo-G/T
|
fb85aa34e98f1f2126d960ceeafa59489d659bd2
|
[
"MIT"
] | null | null | null |
import unittest
import os
import sys
# Add app path to module path
# Strips two path components from this file's real path so that the `app`
# package root is importable when the tests are run from this folder.
sys.path.append(os.path.dirname(os.path.realpath(__file__).rsplit('/', 2)[0]))
from app import create_app
from app.users.models import Users

# Module-level Flask application shared by every test case below.
app = create_app('config')
class TestUsers(unittest.TestCase):
    """CRUD smoke tests for the /users/ views.

    NOTE: the numbered method names (test_01_add, test_02_Update,
    test_03_delete) rely on unittest's alphabetical execution order —
    update and delete operate on the row that test_01_add created.
    """
    def setUp(self):
        # Fresh test client for every test.
        self.app = app.test_client()
    def test_read(self):
        self.app = app.test_client()
        rv = self.app.get('/users/')
        assert "Users" in rv.data.decode('utf-8')
    def test_01_add(self):
        # The long `address` blob exercises large text fields end to end.
        rv = self.app.post('/users/add', data=dict(
            email='testing@flask.pocoo.com',
            password='test string',
            name='test string',
            address="""How to build CRUD app with Python, Flask, SQLAlchemy and MySQL
In this post I will briefly describe,
how you can you build a database driven CRUD (Create, Read, Update, Delete) app on Linux with Python,
Flask, SQLAlchemy and MySQL. I used this process to create a blog and hence the examples below will
describe how to store and modify posts in a MySQL database. You can also download the complete source
code from https://github.com/Leo-g/Flask-Skeleton/
Software Versions
Python 2.7
Flask 0.11
Flask-SQLAlchemy 2.0
Flask-Migrate 1.3
MySQL-python 1.2
Foundation 5
Mariadb 10
Before you continue if you have not built an application on Linux with Flask or Python then
I recommend you read Creating your first Linux App with Python and Flask.
Read more at http://techarena51.com/index.php/flask-sqlalchemy-tutorial/""",
            is_active='False',
            creation_time='2015-12-22T03:12:58.019077+00:00',
            modification_time='2015-12-22T03:12:58.019077+00:00',
            role='35678',), follow_redirects=True)
        assert 'Add was successful' in rv.data.decode('utf-8')
    def test_02_Update(self):
        # App context is needed for the Users.query lookup.
        with app.app_context():
            id = Users.query.first().id
            rv = self.app.post(
                '/users/update/{}'.format(id), data=dict(
                    email='testing@flask.pocoo.com',
                    password='test string',
                    name='test string',
                    address="""How to build CRUD app with Python, Flask, SQLAlchemy and MySQL
In this post I will briefly describe,
how you can you build a database driven CRUD (Create, Read, Update, Delete) app on Linux with Python,
Flask, SQLAlchemy and MySQL. I used this process to create a blog and hence the examples below will
describe how to store and modify posts in a MySQL database. You can also download the complete source
code from https://github.com/Leo-g/Flask-Skeleton/
Software Versions
Python 2.7
Flask 0.11
Flask-SQLAlchemy 2.0
Flask-Migrate 1.3
MySQL-python 1.2
Foundation 5
Mariadb 10
Before you continue if you have not built an application on Linux with Flask or Python then
I recommend you read Creating your first Linux App with Python and Flask.
Read more at http://techarena51.com/index.php/flask-sqlalchemy-tutorial/""",
                    is_active='False',
                    creation_time='2015-12-22T03:12:58.019077+00:00',
                    modification_time='2015-12-22T03:12:58.019077+00:00',
                    role='35678',), follow_redirects=True)
            assert 'Update was successful' in rv.data.decode('utf-8')
    def test_03_delete(self):
        with app.app_context():
            id = Users.query.first().id
            rv = self.app.post(
                'users/delete/{}'.format(id), follow_redirects=True)
            assert 'Delete was successful' in rv.data.decode('utf-8')
# Allow running this test module directly (python test_users.py).
if __name__ == '__main__':
    unittest.main()
| 48.158416
| 140
| 0.495683
| 543
| 4,864
| 4.377532
| 0.276243
| 0.050484
| 0.015145
| 0.023559
| 0.844762
| 0.83719
| 0.816996
| 0.816996
| 0.793437
| 0.793437
| 0
| 0.053957
| 0.428454
| 4,864
| 100
| 141
| 48.64
| 0.801079
| 0.005551
| 0
| 0.708861
| 0
| 0.025316
| 0.675905
| 0.035988
| 0
| 0
| 0
| 0
| 0.050633
| 1
| 0.063291
| false
| 0.025316
| 0.063291
| 0
| 0.139241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d229128ffe27a7cda50645c355062d3f3b3019e5
| 47,702
|
py
|
Python
|
version_1/v1_4/chaos.py
|
andrewcistola/fracture-proof
|
d67f8f4f688fe4168d7eae3a3f991463ee112324
|
[
"MIT"
] | 2
|
2020-03-03T16:53:03.000Z
|
2021-07-13T17:50:03.000Z
|
version_1/v1_4/chaos.py
|
andrewcistola/fracture-proof
|
d67f8f4f688fe4168d7eae3a3f991463ee112324
|
[
"MIT"
] | null | null | null |
version_1/v1_4/chaos.py
|
andrewcistola/fracture-proof
|
d67f8f4f688fe4168d7eae3a3f991463ee112324
|
[
"MIT"
] | null | null | null |
# FractureProof Chaos
label = "_chaos"
path = "fp/v1_4/"
version = "FractureProof v1.4"
title = "Chaos: Finding Significant Predictors in Multi-Level Geographic Data"
author = "DrewC!"
day = str(date.today())
stamp = str(dt.datetime.now())
## Setup Workspace
### Import python libraries
import os # Operating system navigation
from datetime import date # date stamping in Python
from datetime import time # time stamping in Python
### Import data science libraries
import pandas as pd # Widely used data manipulation library with R/Excel like tables named 'data frames'
import numpy as np # Widely used matrix library for numerical processes
import statsmodels.api as sm # Statistics package best for regression models
### Import scikit-learn libraries
from sklearn.preprocessing import StandardScaler # Standard scaling for easier use of machine learning algorithms
from sklearn.impute import SimpleImputer # Univariate imputation for missing data
from sklearn.cluster import KMeans # clusters data by trying to separate samples in n groups of equal variance
from sklearn.decomposition import PCA # Principal compnents analysis from sklearn
from sklearn.ensemble import RandomForestRegressor # Random Forest regression component
from sklearn.ensemble import RandomForestClassifier # Random Forest classification component
from sklearn.feature_selection import RFECV # Recursive Feature elimination with cross validation
from sklearn.linear_model import LinearRegression # Used for machine learning with quantitative outcome
from sklearn.linear_model import LogisticRegression # Used for machine learning with quantitative outcome
from sklearn.metrics import roc_curve # Reciever operator curve
from sklearn.metrics import auc # Area under the curve
### Import keras libraries
from keras.models import Sequential # Uses a simple method for building layers in MLPs
from keras.models import Model # Uses a more complex method for building layers in deeper networks
from keras.layers import Dense # Used for creating dense fully connected layers
from keras.layers import Input # Used for designating input layers
### Set Directory
os.chdir("C:/Users/drewc/GitHub/allocativ") # Set wd to project repository (machine-specific path — NOTE(review): breaks on any other machine)
### Setup Script Results File
# Write the report header. `with` guarantees the handle is closed even if a
# write fails (the original used a bare open()/close() pair).
with open(path + day + "_results" + label + ".txt", "w") as text_file:
    text_file.write("####################" + "\n\n")
    text_file.write(title + "\n")   # Report title
    text_file.write(version + "\n") # Tool version
    text_file.write(author + "\n")  # Author tag
    text_file.write(stamp + "\n")   # Run timestamp
    text_file.write("\n" + "####################" + "\n\n")
# Step 1: Raw Data Processing
sub = "Step 1: Raw Data Processing and Feature Engineering"
y = "Final Payment Adjustments FY 2018-2020"
a = "CMS Hospital Compare 2018 release "
d = "CMS Hospital General Info 2018 release"
c = "US CENSUS American Community Survey 5 Year Average by Zip Code 2018 release"
g = "HRSA Area Health Resource File by County 2018 release"
## Process Raw Outcome Data
### Individual Features and Targets
# BUG FIX: low_memory was passed the STRING 'false', which is truthy and so
# behaved as low_memory=True; pass the boolean False as intended.
df_raw = pd.read_csv("hnb/CMS/CMS_2018_FIPS_full.csv", low_memory = False) # Import dataset saved as csv in _data folder
df_raw['Facility ID'] = df_raw['Facility ID'].astype("str") # Facility ID is an identifier, not a number
df_raw = df_raw.dropna(subset = ["2020 VBP Adjustment Factor"]) # Keep only rows with a FY2020 outcome
# Binary targets: 1 = penalty applied (adjustment factor below 1), 0 = no penalty
df_raw["train"] = np.where(df_raw["2020 VBP Adjustment Factor"] < 1, 1, 0) # FY2020 binary target
df_raw["test"] = np.where(df_raw["2019 VBP Adjustment Factor"] < 1, 1, 0) # FY2019 binary target
df_raw["test2"] = np.where(df_raw["2018 VBP Adjustment Factor"] < 1, 1, 0) # FY2018 binary target
df_raw["quant"] = df_raw["2020 VBP Adjustment Factor"] # Quantitative target: the raw FY2020 factor
df_raw = df_raw.drop(columns = ["2018 VBP Adjustment Factor", "2019 VBP Adjustment Factor", "2020 VBP Adjustment Factor"]) # Drop the raw factors used to derive the targets
df_raw.info() # Get class, memory, and column info: names, data types, obs.
### Export Targets
# Pull the target columns plus identifiers into their own frame, standard-scale
# the targets, then re-attach the identifiers. The IDs are moved into the index
# first so only numeric targets reach the scaler.
Y_raw = df_raw.filter(["FIPS", "train", "test", "test2", "quant", "Facility ID"])
Y_raw = Y_raw.set_index(["Facility ID", "FIPS"]) # Keep the IDs out of the scaler
# NOTE(review): standard-scaling the 0/1 targets maps them to two values around
# zero; downstream code recovers the positive class via `> 0`.
Y_ss = pd.DataFrame(StandardScaler().fit_transform(Y_raw.values), columns = Y_raw.columns) # Scaled targets; index resets to 0..n-1
Y_raw = Y_raw.reset_index(level = ["Facility ID", "FIPS"]) # Restore IDs as columns (row order unchanged)
Y_ss["Facility ID"] = Y_raw["Facility ID"] # Row order unchanged, so index-aligned re-attach is safe
Y_ss["FIPS"] = Y_raw["FIPS"]
Y_ss = Y_ss.set_index(["Facility ID"]) # Facility ID becomes the index of the scaled targets
Y_quant = Y_ss["quant"] # Scaled FY2020 adjustment factor (quantitative target)
Y_train = Y_ss["train"] # Scaled FY2020 binary penalty target
Y_test = Y_ss["test"] # Scaled FY2019 binary penalty target
Y_test2 = Y_ss["test2"] # Scaled FY2018 binary penalty target
Y_ss.info() # Get class, memory, and column info: names, data types, obs.
## Process Raw Predictor Data
### Individual Agent Predictors
# All CMS columns except: summary/domain scores (proximate to the outcome),
# the five hand-picked demographic features (modeled separately below),
# the FIPS join key, and the four target columns. The survivors then go through
# the shared dropna -> median-impute -> standard-scale pipeline.
X_a_raw = df_raw.drop(columns = ["Total Performance Score",
    "Weighted Normalized Clinical Outcomes Domain Score",
    "Weighted Safety Domain Score",
    "Weighted Person and Community Engagement Domain Score",
    "Weighted Efficiency and Cost Reduction Domain Score",
    "Medicare hospital spending per patient (Medicare Spending per Beneficiary)",
    "Rate of readmission after discharge from hospital (hospital-wide)",
    "Hospital overall rating",
    "Hospital Ownership ForProfit",
    "TOTAL HAC SCORE",
    "FIPS",
    "quant",
    "train",
    "test",
    "test2"]) # Drop proximity features: Adjustment factor scores
X_a_raw = X_a_raw.set_index("Facility ID") # Keep the ID out of the numeric pipeline
X_a_na = X_a_raw.dropna(axis = 1, thresh = 0.75*len(X_a_raw)) # Drop columns with less than 75% non-NA values
X_a_na = pd.DataFrame(SimpleImputer(strategy = "median").fit_transform(X_a_na), columns = X_a_na.columns) # Median-impute remaining NAs; index resets to 0..n-1
X_a = pd.DataFrame(StandardScaler().fit_transform(X_a_na.values), columns = X_a_na.columns) # Standard scale (zero mean, unit variance)
X_a_raw = X_a_raw.reset_index(level = ["Facility ID"]) # Restore the ID column (row order unchanged)
X_a["Facility ID"] = X_a_raw["Facility ID"] # Row order unchanged, so index-aligned re-attach is safe
X_a = X_a.set_index(["Facility ID"]) # Facility ID becomes the index
X_a.info() # Get class, memory, and column info: names, data types, obs.
### Individual Demographic Predictors
# Five hand-selected hospital-level features, put through the same
# dropna -> median-impute -> standard-scale pipeline as the agent set.
X_d_raw = df_raw.filter(["Medicare hospital spending per patient (Medicare Spending per Beneficiary)",
    "Rate of readmission after discharge from hospital (hospital-wide)",
    "Hospital overall rating",
    "Hospital Ownership ForProfit",
    "TOTAL HAC SCORE",
    "Facility ID"]) # Subset by hand selected features for model
X_d_raw = X_d_raw.set_index("Facility ID") # Keep the ID out of the numeric pipeline
X_d_na = X_d_raw.dropna(axis = 1, thresh = 0.75*len(X_d_raw)) # Drop columns with less than 75% non-NA values
X_d_na = pd.DataFrame(SimpleImputer(strategy = "median").fit_transform(X_d_na), columns = X_d_na.columns) # Median-impute remaining NAs; index resets to 0..n-1
X_d = pd.DataFrame(StandardScaler().fit_transform(X_d_na.values), columns = X_d_na.columns) # Standard scale (zero mean, unit variance)
X_d_raw = X_d_raw.reset_index(level = ["Facility ID"]) # Restore the ID column (row order unchanged)
X_d["Facility ID"] = X_d_raw["Facility ID"] # Row order unchanged, so index-aligned re-attach is safe
X_d = X_d.set_index(["Facility ID"]) # Facility ID becomes the index
X_d.info() # Get class, memory, and column info: names, data types, obs.
### Ecological Contextual Predictors
# County-level ACS features joined onto the hospitals via FIPS, then the same
# dropna -> median-impute -> standard-scale pipeline.
X_c_raw = pd.read_csv("hnb/ACS/DP5Y2018/ACS_DP5Y2018_FIPS_gini.csv") # Import dataset saved as csv in _data folder
X_c_raw = pd.merge(Y_raw, X_c_raw, on = "FIPS", how = "left") # Left join keeps every hospital, even those without ACS data
X_c_raw = X_c_raw.set_index("Facility ID") # Keep the ID out of the numeric pipeline
X_c_raw = X_c_raw.drop(columns = ["FIPS", "train", "test", "test2", "quant"]) # Drop the join key and target columns carried in by the merge
X_c_na = X_c_raw.dropna(axis = 1, thresh = 0.75*len(X_c_raw)) # Drop columns with less than 75% non-NA values
X_c_na = pd.DataFrame(SimpleImputer(strategy = "median").fit_transform(X_c_na), columns = X_c_na.columns) # Median-impute remaining NAs; index resets to 0..n-1
X_c = pd.DataFrame(StandardScaler().fit_transform(X_c_na.values), columns = X_c_na.columns) # Standard scale (zero mean, unit variance)
X_c_raw = X_c_raw.reset_index(level = ["Facility ID"]) # Restore the ID column (row order unchanged)
X_c["Facility ID"] = X_c_raw["Facility ID"] # Row order unchanged, so index-aligned re-attach is safe
X_c = X_c.set_index(["Facility ID"]) # Facility ID becomes the index
X_c.info() # Get class, memory, and column info: names, data types, obs.
### Ecological Global Predictors
# County-level HRSA AHRF features (2014-2018 columns only) joined via FIPS,
# then the same dropna -> median-impute -> standard-scale pipeline.
X_g_raw = pd.read_csv("hnb/HRSA/AHRF/AHRF_2018_2019_SAS/AHRF_full.csv") # Import dataset saved as csv in _data folder
X_g_raw = X_g_raw.loc[:, X_g_raw.columns.str.contains('2018|2017|2016|2015|2014|FIPS')] # Keep only columns whose NAME mentions 2014-2018 or FIPS
X_g_raw = pd.merge(Y_raw, X_g_raw, on = "FIPS", how = "left") # Left join keeps every hospital, even those without AHRF data
X_g_raw = X_g_raw.set_index("Facility ID") # Keep the ID out of the numeric pipeline
X_g_raw = X_g_raw.drop(columns = ["FIPS", "train", "test", "test2", "quant"]) # Drop the join key and target columns carried in by the merge
X_g_na = X_g_raw.dropna(axis = 1, thresh = 0.75*len(X_g_raw)) # Drop columns with less than 75% non-NA values
X_g_na = pd.DataFrame(SimpleImputer(strategy = "median").fit_transform(X_g_na), columns = X_g_na.columns) # Median-impute remaining NAs; index resets to 0..n-1
X_g = pd.DataFrame(StandardScaler().fit_transform(X_g_na.values), columns = X_g_na.columns) # Standard scale (zero mean, unit variance)
X_g_raw = X_g_raw.reset_index(level = ["Facility ID"]) # Restore the ID column (row order unchanged)
X_g["Facility ID"] = X_g_raw["Facility ID"] # Row order unchanged, so index-aligned re-attach is safe
X_g = X_g.set_index(["Facility ID"]) # Facility ID becomes the index
X_g.info() # Get class, memory, and column info: names, data types, obs.
### Append to Text File
# Append the Step 1 data-source summary to the running results report.
# FIXES relative to the original output text: "Adjsutment" -> "Adjustment",
# "Engineeering" -> "Engineering", "relese" -> "release"; file handled with
# `with` so it is closed even if a write fails.
with open(path + day + "_results" + label + ".txt", "a") as text_file:
    text_file.write(sub + "\n\n") # Section heading
    text_file.write(y + "\n")     # Outcome description
    text_file.write("   Quantitative = 2020 VBP Adjustment Factor" + "\n")
    text_file.write("   Binary = 0/1, No/Yes, Penalty Applied" + "\n")
    text_file.write("   quant, train, test, test2 = FY2020(Q), FY2020(B), FY2019(B), FY2018(B)" + "\n")
    text_file.write(str(Y_raw.describe()) + "\n\n") # Summary statistics of the targets
    # One stanza per predictor matrix: source, shape, level, year, engineering
    text_file.write(a + "\n")
    text_file.write("   (Rows, Columns) = " + str(X_a.shape) + "\n")
    text_file.write("   Level = Hospital" + "\n")
    text_file.write("   Year = 2018" + "\n")
    text_file.write("   Feature Engineering = 75% nonNA, Median Imputed NA, Standard Scaled" + "\n\n")
    text_file.write(d + "\n")
    text_file.write("   (Rows, Columns) = " + str(X_d.shape) + "\n")
    text_file.write("   Level = Hospital" + "\n")
    text_file.write("   Year = 2018" + "\n")
    text_file.write("   Feature Engineering = 75% nonNA, Median Imputed NA, Standard Scaled" + "\n\n")
    text_file.write(c + "\n")
    text_file.write("   (Rows, Columns) = " + str(X_c.shape) + "\n")
    text_file.write("   Level = Hospital" + "\n")
    text_file.write("   Year = 2018 release, includes 5 year average for 2014-2018" + "\n")
    text_file.write("   Feature Engineering = Gini index and Average of Zip Codes by County, 75% nonNA, Median Imputed NA, Standard Scaled" + "\n\n")
    text_file.write(g + "\n")
    text_file.write("   (Rows, Columns) = " + str(X_g.shape) + "\n")
    text_file.write("   Level = County" + "\n")
    text_file.write("   Year = 2018 release, kept features from 2015-2018" + "\n")
    text_file.write("   Feature Engineering = 75% nonNA, Median Imputed NA, Standard Scaled" + "\n\n")
    text_file.write("####################" + "\n\n")
# Step 2: Initial Prediction with Closed Box Models
sub2 = "Step 2: Initial Prediction with Closed Models"
m1 = "Multi-Layer Perceptron"
m2 = " with Autoencoder"
## Multi-Layer Perceptron for Individual Agencies
### Build Network with keras Sequential API
# Prep Inputs
# FIX: renamed `input` -> `n_features`; the original shadowed the builtin input().
n_features = X_a.shape[1] # Number of predictor columns
nodes = round(n_features / 2) # Half the input dimension for each hidden layer
network = Sequential()
# Dense Layers
network.add(Dense(nodes, activation = 'relu', kernel_initializer = 'random_normal', input_dim = n_features)) # First Hidden Layer
network.add(Dense(nodes, activation = 'relu', kernel_initializer = 'random_normal')) # Second Hidden Layer
# Activation Layer
network.add(Dense(1, activation = 'sigmoid', kernel_initializer = 'random_normal')) # Output Layer
# Compile
network.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy']) # ADAM ("Adaptive moment estimation" = RMSProp + momentum)
# Fit
network.fit(X_a, Y_train, batch_size = 10, epochs = 100) # Train against the FY2020 binary target
# Predict
Y_a = network.predict(X_a) # In-sample predicted probabilities
# AUC Score
# NOTE(review): thresholding the probabilities at 0.5 before roc_curve collapses
# the ROC to a single operating point; passing Y_a directly would give a true AUC.
# Kept as-is to preserve the original metric.
fpr, tpr, threshold = roc_curve((Y_train > 0), (Y_a > 0.5)) # FY2020 target (scaled, so > 0 recovers class 1)
Ia_train = auc(fpr, tpr) # AUC against the training-year target
fpr, tpr, threshold = roc_curve((Y_test > 0), (Y_a > 0.5)) # FY2019 target
Ia_test = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test2 > 0), (Y_a > 0.5)) # FY2018 target
Ia_test2 = auc(fpr, tpr)
## Multi-Layer Perceptron for Individual Demographics
### Build Network with keras Sequential API
# Prep Inputs
# FIX: renamed `input` -> `n_features`; the original shadowed the builtin input().
n_features = X_d.shape[1] # Number of predictor columns
nodes = round(n_features / 2) # Half the input dimension for each hidden layer
network = Sequential()
# Dense Layers
network.add(Dense(nodes, activation = 'relu', kernel_initializer = 'random_normal', input_dim = n_features)) # First Hidden Layer
network.add(Dense(nodes, activation = 'relu', kernel_initializer = 'random_normal')) # Second Hidden Layer
# Activation Layer
network.add(Dense(1, activation = 'sigmoid', kernel_initializer = 'random_normal')) # Output Layer
# Compile
network.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy']) # ADAM ("Adaptive moment estimation" = RMSProp + momentum)
# Fit
network.fit(X_d, Y_train, batch_size = 10, epochs = 100) # Train against the FY2020 binary target
# Predict
Y_d = network.predict(X_d) # In-sample predicted probabilities
# AUC Score
# NOTE(review): thresholding predictions at 0.5 before roc_curve collapses the
# ROC to a single operating point; kept as-is to preserve the original metric.
fpr, tpr, threshold = roc_curve((Y_train > 0), (Y_d > 0.5)) # FY2020 target (scaled, so > 0 recovers class 1)
Id_train = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test > 0), (Y_d > 0.5)) # FY2019 target
Id_test = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test2 > 0), (Y_d > 0.5)) # FY2018 target
Id_test2 = auc(fpr, tpr)
## MLP with stacked autoencoder for ecological contexts
### Build Network with keras Functional API
# Prep Inputs
# FIX: renamed `input` -> `n_features`; the original shadowed the builtin input().
n_features = X_c.shape[1] # Number of predictor columns
nodes = round(n_features / 2) # Half the input dimension for the final dense layers
# Input layer
i = Input(shape = (n_features,))
# Encoder: 800 -> 200 -> 50 -> 10
x = Dense(800, activation = 'relu')(i)
x = Dense(200, activation = 'relu')(x)
x = Dense(50, activation = 'relu')(x)
x = Dense(10, activation = 'relu')(x)
# Decoder: 10 -> 50 -> 200 -> 800
x = Dense(10, activation = 'relu')(x)
x = Dense(50, activation = 'relu')(x)
x = Dense(200, activation = 'relu')(x)
x = Dense(800, activation = 'relu')(x)
# Dense Layers
x = Dense(nodes, activation = 'relu')(x) # First Hidden Layer
x = Dense(nodes, activation = 'relu')(x) # Second Hidden Layer
# Output layer
x = Dense(1, activation = 'sigmoid')(x) # Output Layer
# Save network structure
network = Model(i, x)
# Compile
network.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy']) # ADAM ("Adaptive moment estimation" = RMSProp + momentum)
# Fit
network.fit(X_c, Y_train, batch_size = 10, epochs = 5) # NOTE: 5 epochs here (the X_g network below uses 10)
# Predict
Y_c = network.predict(X_c) # In-sample predicted probabilities
# AUC Score
# NOTE(review): thresholding predictions at 0.5 before roc_curve collapses the
# ROC to a single operating point; kept as-is to preserve the original metric.
fpr, tpr, threshold = roc_curve((Y_train > 0), (Y_c > 0.5)) # FY2020 target (scaled, so > 0 recovers class 1)
Ic_train = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test > 0), (Y_c > 0.5)) # FY2019 target
Ic_test = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test2 > 0), (Y_c > 0.5)) # FY2018 target
Ic_test2 = auc(fpr, tpr)
## MLP with stacked autoencoder for Ecological globals
### Build Network with keras Functional API
# Prep Inputs
# FIX: renamed `input` -> `n_features`; the original shadowed the builtin input().
n_features = X_g.shape[1] # Number of predictor columns
nodes = round(n_features / 2) # Half the input dimension for the final dense layers
# Input layer
i = Input(shape = (n_features,))
# Encoder: 1600 -> 400 -> 100 -> 25
x = Dense(1600, activation = 'relu')(i)
x = Dense(400, activation = 'relu')(x)
x = Dense(100, activation = 'relu')(x)
x = Dense(25, activation = 'relu')(x)
# Decoder: 25 -> 100 -> 400 -> 1600
x = Dense(25, activation = 'relu')(x)
x = Dense(100, activation = 'relu')(x)
x = Dense(400, activation = 'relu')(x)
x = Dense(1600, activation = 'relu')(x)
# Dense Layers
x = Dense(nodes, activation = 'relu')(x) # First Hidden Layer
x = Dense(nodes, activation = 'relu')(x) # Second Hidden Layer
# Output layer
x = Dense(1, activation = 'sigmoid')(x) # Output Layer
# Save network structure
network = Model(i, x)
# Compile
network.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy']) # ADAM ("Adaptive moment estimation" = RMSProp + momentum)
# Fit
network.fit(X_g, Y_train, batch_size = 10, epochs = 10) # Train against the FY2020 binary target
# Predict
Y_g = network.predict(X_g) # In-sample predicted probabilities
# AUC Score
# NOTE(review): thresholding predictions at 0.5 before roc_curve collapses the
# ROC to a single operating point; kept as-is to preserve the original metric.
fpr, tpr, threshold = roc_curve((Y_train > 0), (Y_g > 0.5)) # FY2020 target (scaled, so > 0 recovers class 1)
Ig_train = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test > 0), (Y_g > 0.5)) # FY2019 target
Ig_test = auc(fpr, tpr)
fpr, tpr, threshold = roc_curve((Y_test2 > 0), (Y_g > 0.5)) # FY2018 target
Ig_test2 = auc(fpr, tpr)
## Export Initial Prediction Results
### Append to Text File
# Append the Step 2 AUC summary to the running results report.
# FIXES relative to the original output: the third AUC line of every stanza was
# labeled "test" although it reports the test2 (FY2018) score, and the ACS
# stanza claimed "Epochs = 10" although that network was fit with epochs = 5.
with open(path + day + "_results" + label + ".txt", "a") as text_file:
    text_file.write(sub2 + "\n\n") # Section heading
    # CMS agent-features MLP
    text_file.write(a + "\n")
    text_file.write("   Network = " + m1 +"\n")
    text_file.write("   Layers = Dense, Dense, Activation" + "\n")
    text_file.write("   Functions = ReLU, ReLU, Sigmoid" + "\n")
    text_file.write("   Epochs = 100" + "\n")
    text_file.write("   Targets = (train, test, test2), (FY2020, FY2019, FY2018)" + "\n")
    text_file.write("   AUC Scores" + "\n")
    text_file.write("      train = " + str(Ia_train) + "\n")
    text_file.write("      test = " + str(Ia_test) + "\n")
    text_file.write("      test2 = " + str(Ia_test2) + "\n\n")
    # Demographic-features MLP
    text_file.write(d + "\n")
    text_file.write("   Network = " + m1 +"\n")
    text_file.write("   Layers = Dense, Dense, Activation" + "\n")
    text_file.write("   Functions = ReLU, ReLU, Sigmoid" + "\n")
    text_file.write("   Targets = (train, test, test2), (FY2020, FY2019, FY2018)" + "\n")
    text_file.write("   Epochs = 100" + "\n")
    text_file.write("   AUC Scores" + "\n")
    text_file.write("      train = " + str(Id_train) + "\n")
    text_file.write("      test = " + str(Id_test) + "\n")
    text_file.write("      test2 = " + str(Id_test2) + "\n\n")
    # ACS contextual autoencoder + MLP
    text_file.write(c + "\n")
    text_file.write("   Network = " + m1 + m2 +"\n")
    text_file.write("   Layers = Input, Decoder, Encoder, Dense, Dense, Activation" + "\n")
    text_file.write("   Functions = Input,ReLU, ReLU, ReLU, ReLU, Sigmoid" + "\n")
    text_file.write("   Targets = (train, test, test2), (FY2020, FY2019, FY2018)" + "\n")
    text_file.write("   Epochs = 5" + "\n")
    text_file.write("   AUC Scores" + "\n")
    text_file.write("      train = " + str(Ic_train) + "\n")
    text_file.write("      test = " + str(Ic_test) + "\n")
    text_file.write("      test2 = " + str(Ic_test2) + "\n\n")
    # AHRF global autoencoder + MLP
    text_file.write(g + "\n")
    text_file.write("   Network = " + m1 + m2 +"\n")
    text_file.write("   Layers = Input, Decoder, Encoder, Dense, Dense, Activation" + "\n")
    text_file.write("   Functions = Input,ReLU, ReLU, ReLU, ReLU, Sigmoid" + "\n")
    text_file.write("   Targets = (train, test, test2), (FY2020, FY2019, FY2018)" + "\n")
    text_file.write("   Epochs = 10" + "\n")
    text_file.write("   AUC Scores" + "\n")
    text_file.write("      train = " + str(Ig_train) + "\n")
    text_file.write("      test = " + str(Ig_test) + "\n")
    text_file.write("      test2 = " + str(Ig_test2) + "\n\n")
    text_file.write("####################" + "\n\n")
# Step 3: Identify Predictors with Open Box Models
sub3 = "Step 3: Identify Predictors with Open Models"
m3 = "Principal Component Analysis"
m4 = "Random Forests"
m5 = "Recursive feature Elimination"
m6 = "Multiple Regression"
## Identify Predictors for Individual Agents
### Principal Component Analysis
degree = len(X_a.columns) - 1 # Components capped at number of features - 1
pca = PCA(n_components = degree) # Full-rank PCA to inspect all eigenvalues
pca.fit(X_a) # Fit initial PCA model
df_comp = pd.DataFrame(pca.explained_variance_) # Explained variance (eigenvalue) per component
df_comp = df_comp[(df_comp[0] > 1)] # Kaiser criterion: keep components with eigenvalue > 1
components = len(df_comp.index) - 3 # NOTE(review): the "- 3" trims three extra components; rationale not shown here — confirm intent
pca = PCA(n_components = components) # Refit with the reduced component count
pca.fit_transform(X_a) # Refit on the same data
df_pc = pd.DataFrame(pca.components_, columns = X_a.columns) # Eigenvectors: one row per component, one column per feature
df_pc["Variance"] = pca.explained_variance_ratio_ # Explained-variance ratio per component
df_pc = df_pc[df_pc["Variance"] > df_pc["Variance"].mean()] # Keep components with above-average explained variance
df_pc = df_pc.abs() # Loading magnitude (sign is irrelevant for ranking)
df_pc = df_pc.drop(columns = ["Variance"]) # Drop the helper column before taking per-feature maxima
df_p = pd.DataFrame(df_pc.max(), columns = ["MaxEV"]) # Max absolute loading of each feature across kept components
df_p = df_p[df_p.MaxEV > df_p.MaxEV.mean()] # Keep features with above-average max loading
df_p = df_p.reset_index() # Feature labels move from index into a column named "index"
df_pca = df_p.rename(columns = {"index": "Features"}) # Rename former index as features
df_pca = df_pca.sort_values(by = ["MaxEV"], ascending = False) # Strongest loadings first
### Random Forest Regressor
forest = RandomForestRegressor(n_estimators = 1000, max_depth = 10) # 1000 trees, depth capped at 10
forest.fit(X_a, Y_quant) # Fit Forest model, This will take time
rf = forest.feature_importances_ # Gini importance per feature
l_rf = list(zip(X_a, rf)) # Pair each column name with its importance
df_rf = pd.DataFrame(l_rf, columns = ["Features", "Gini"]) # Importance table
df_rf = df_rf[(df_rf["Gini"] > df_rf["Gini"].mean())] # Keep features with above-average importance
df_rf = df_rf.sort_values(by = ["Gini"], ascending = False) # Most important first
### Recursive Feature Elimination
df_pca_rf = pd.merge(df_pca, df_rf, on = "Features", how = "inner") # Features that survived BOTH the PCA and RF screens
pca_rf = df_pca_rf["Features"].tolist() # Candidate feature list for RFE
recursive = RFECV(estimator = LinearRegression(), min_features_to_select = 5) # Cross-validated RFE around OLS; keep at least 5 features
recursive.fit(X_a[pca_rf], Y_quant) # This will take time
rfe = recursive.support_ # Boolean mask of selected features
l_rfe = list(zip(X_a[pca_rf], rfe)) # Pair candidate names with their selection flag
df_rfe = pd.DataFrame(l_rfe, columns = ["Features", "RFE"]) # Selection table
df_rfe = df_rfe[df_rfe.RFE == True] # Keep only the selected features
df_rfe = pd.merge(df_rfe, df_pca_rf, on = "Features", how = "inner") # Re-attach the PCA/RF scores
### Multiple Regression
pca_rf_rfe = df_rfe["Features"].tolist() # Final feature list after all three screens
regression = LinearRegression() # Linear Regression in scikit learn
regression.fit(X_a[pca_rf_rfe], Y_quant) # Fit model
coef = regression.coef_ # Fitted OLS coefficients
l_reg = list(zip(X_a[pca_rf_rfe], coef)) # Pair feature names with coefficients
df_reg = pd.DataFrame(l_reg, columns = ["Features", "Coefficients"]) # Coefficient table
df_reg = df_reg.sort_values(by = ["Coefficients"], ascending = False) # Largest coefficients first
### Export feature attributes
fp_X_a = pd.merge(df_rfe, df_reg, on = "Features", how = "inner") # Final table for the agent predictor set
## Identify Predictors for Individual Demographics
### Multiple Regression
# Fit an ordinary least-squares model of the quantitative target on the five
# hand-picked demographic features and rank them by fitted coefficient.
regression = LinearRegression() # scikit-learn OLS
regression.fit(X_d, Y_quant) # Estimate coefficients on the scaled features
coef = regression.coef_ # One coefficient per column of X_d
df_reg = pd.DataFrame({"Features": list(X_d), "Coefficients": coef}) # Column-name / coefficient table
df_reg = df_reg.sort_values(by = "Coefficients", ascending = False) # Largest coefficients first
### Export feature attributes for Individual Demographics
fp_X_d = df_reg # Final table for the demographic predictor set
## Identify Predictors for Ecological Contexts
### Principal Component Analysis
degree = len(X_c.columns) - 1 # Save number of features -1 to get degrees of freedom
pca = PCA(n_components = degree) # Pass the number of components to make PCA model based on degrees of freedom
pca.fit(X_c) # Fit initial PCA model
df_comp = pd.DataFrame(pca.explained_variance_) # Print explained variance of components
df_comp = df_comp[(df_comp[0] > 1)] # Save eigenvalues above 1 to identify components
components = len(df_comp.index) - 3 # Save count of components for Variable reduction
pca = PCA(n_components = components) # you will pass the number of components to make PCA model
pca.fit_transform(X_c) # finally call fit_transform on the aggregate data to create PCA results object
df_pc = pd.DataFrame(pca.components_, columns = X_c.columns) # Export eigenvectors to data frame with column names from original data
df_pc["Variance"] = pca.explained_variance_ratio_ # Save eigenvalues as their own column
df_pc = df_pc[df_pc["Variance"] > df_pc["Variance"].mean()] # Susbet by eigenvalues with above average exlained variance ratio
df_pc = df_pc.abs() # Get absolute value of eigenvalues
df_pc = df_pc.drop(columns = ["Variance"]) # Drop outcomes and targets
df_p = pd.DataFrame(df_pc.max(), columns = ["MaxEV"]) # select maximum eigenvector for each feature
df_p = df_p[df_p.MaxEV > df_p.MaxEV.mean()] # Susbet by above average max eigenvalues
df_p = df_p.reset_index() # Add a new index of ascending values, existing index consisting of feature labels becomes column named "index"
df_pca = df_p.rename(columns = {"index": "Features"}) # Rename former index as features
df_pca = df_pca.sort_values(by = ["MaxEV"], ascending = False) # Sort Columns by Value
### Random Forest Regresson
forest = RandomForestRegressor(n_estimators = 1000, max_depth = 10) #Use default values except for number of trees. For a further explanation see readme included in repository.
forest.fit(X_c, Y_quant) # Fit Forest model, This will take time
rf = forest.feature_importances_ # Output importances of features
l_rf = list(zip(X_c, rf)) # Create list of variables alongside importance scores
df_rf = pd.DataFrame(l_rf, columns = ["Features", "Gini"]) # Create data frame of importances with variables and gini column names
df_rf = df_rf[(df_rf["Gini"] > df_rf["Gini"].mean())] # Subset by Gini values higher than mean
df_rf = df_rf.sort_values(by = ["Gini"], ascending = False) # Sort Columns by Value
### Recursive Feature Elimination
df_pca_rf = pd.merge(df_pca, df_rf, on = "Features", how = "inner") # Inner join keeps features passed by both PCA and RF
pca_rf = df_pca_rf["Features"].tolist() # Feature names surviving both filters
recursive = RFECV(estimator = LinearRegression(), min_features_to_select = 5) # Cross-validated RFE keeping at least 5 features; see readme for more info
recursive.fit(X_c[pca_rf], Y_quant) # This will take time
# FIX: use support_ (boolean selection mask), consistent with the X_g section below and with
# the "Values = ... True" label written to the results file. The previous code used ranking_
# (integer ranks) while its comment claimed booleans; ranking_ == 1 is equivalent to support_.
rfe = recursive.support_ # Boolean mask of selected features
l_rfe = list(zip(X_c[pca_rf], rfe)) # Pair feature names with the selection mask
df_rfe = pd.DataFrame(l_rfe, columns = ["Features", "RFE"]) # Data frame of RFE results
df_rfe = df_rfe[df_rfe.RFE == True] # Keep only selected features
df_rfe = pd.merge(df_rfe, df_pca_rf, on = "Features", how = "inner") # Re-attach MaxEV and Gini columns
### Multiple Regression
pca_rf_rfe = df_rfe["Features"].tolist() # Save chosen features as list
regression = LinearRegression() # Linear regression in scikit-learn
regression.fit(X_c[pca_rf_rfe], Y_quant) # Fit model
coef = regression.coef_ # Coefficients as a numpy array
l_reg = list(zip(X_c[pca_rf_rfe], coef)) # Pair feature names with coefficients
df_reg = pd.DataFrame(l_reg, columns = ["Features", "Coefficients"]) # Data frame of coefficients
df_reg = df_reg.sort_values(by = ["Coefficients"], ascending = False) # Sort rows by coefficient, descending
### Export feature attributes
fp_X_c = pd.merge(df_rfe, df_reg, on = "Features", how = "inner") # Inner join keeps features present in both frames
## Identify Predictors for Ecological Globals
### Principal Component Analysis
degree = len(X_g.index) - 1 # Observations minus one gives the degrees of freedom
pca = PCA(n_components = degree) # Initial PCA keeps one component per degree of freedom
pca.fit(X_g) # Fit initial PCA model
df_comp = pd.DataFrame(pca.explained_variance_) # Explained variance (eigenvalue) of each component
df_comp = df_comp[(df_comp[0] > 1)] # Keep components with eigenvalue above 1 (Kaiser rule)
components = len(df_comp.index) - 3 # NOTE(review): subtracts 3 from the component count here — confirm the offset is intentional
pca = PCA(n_components = components) # Re-fit PCA with the reduced component count
pca.fit_transform(X_g) # Fit/transform the aggregate data to create the PCA results object
df_pc = pd.DataFrame(pca.components_, columns = X_g.columns) # Eigenvectors with column names from the original data
df_pc["Variance"] = pca.explained_variance_ratio_ # Explained variance ratio of each component as its own column
df_pc = df_pc[df_pc["Variance"] > df_pc["Variance"].mean()] # Subset to components with above-average explained variance ratio
df_pc = df_pc.abs() # Absolute value of eigenvector loadings
df_pc = df_pc.drop(columns = ["Variance"]) # Drop the helper Variance column before taking per-feature maxima
df_p = pd.DataFrame(df_pc.max(), columns = ["MaxEV"]) # Maximum absolute loading for each feature
df_p = df_p[df_p.MaxEV > df_p.MaxEV.mean()] # Subset to features with above-average maximum loading
df_p = df_p.reset_index() # Feature labels move from the index into a column named "index"
df_pca = df_p.rename(columns = {"index": "Features"}) # Rename former index as Features
df_pca = df_pca.sort_values(by = ["MaxEV"], ascending = False) # Sort rows by loading, descending
### Random Forest Regression
forest = RandomForestRegressor(n_estimators = 1000, max_depth = 10) # Defaults except tree count and depth; see the readme in the repository
forest.fit(X_g, Y_quant) # Fit forest model; this will take time
rf = forest.feature_importances_ # Gini importances of features
l_rf = list(zip(X_g, rf)) # Pair column names with importance scores (iterating a DataFrame yields column labels)
df_rf = pd.DataFrame(l_rf, columns = ["Features", "Gini"]) # Data frame of importances
df_rf = df_rf[(df_rf["Gini"] > df_rf["Gini"].mean())] # Subset by Gini values higher than mean
df_rf = df_rf.sort_values(by = ["Gini"], ascending = False) # Sort rows by importance, descending
### Recursive Feature Elimination
df_pca_rf = pd.merge(df_pca, df_rf, on = "Features", how = "inner") # Inner join keeps features passed by both PCA and RF
pca_rf = df_pca_rf["Features"].tolist() # Feature names surviving both filters
recursive = RFECV(estimator = LinearRegression(), min_features_to_select = 5) # Cross-validated RFE keeping at least 5 features; see readme for more info
recursive.fit(X_g[pca_rf], Y_quant) # This will take time
rfe = recursive.support_ # Boolean mask of selected features
l_rfe = list(zip(X_g[pca_rf], rfe)) # Pair feature names with the selection mask
df_rfe = pd.DataFrame(l_rfe, columns = ["Features", "RFE"]) # Data frame of RFE results
df_rfe = df_rfe[df_rfe.RFE == True] # Keep only selected features
df_rfe = pd.merge(df_rfe, df_pca_rf, on = "Features", how = "inner") # Re-attach MaxEV and Gini columns
### Multiple Regression
pca_rf_rfe = df_rfe["Features"].tolist() # Save chosen features as list
regression = LinearRegression() # Linear regression in scikit-learn
regression.fit(X_g[pca_rf_rfe], Y_quant) # Fit model
coef = regression.coef_ # Coefficients as a numpy array
l_reg = list(zip(X_g[pca_rf_rfe], coef)) # Pair feature names with coefficients
df_reg = pd.DataFrame(l_reg, columns = ["Features", "Coefficients"]) # Data frame of coefficients
df_reg = df_reg.sort_values(by = ["Coefficients"], ascending = False) # Sort rows by coefficient, descending
### Export feature attributes
fp_X_g = pd.merge(df_rfe, df_reg, on = "Features", how = "inner") # Inner join keeps features present in both frames
### Append to Text File
# Report each feature-selection pipeline (a, d, c, g) with its models, value
# types, thresholds, and resulting feature table. The context manager
# guarantees the handle is closed even if a write fails.
with open(path + day + "_results" + label + ".txt", "a") as text_file: # Append so earlier sections are preserved
    text_file.write(sub3 + "\n\n") # Section header followed by a blank line
    text_file.write(a + "\n") # Label for the X_a pipeline
    text_file.write("   Models = " + m3 + m4 + m5 + m6 + "\n")
    text_file.write("   Values = Eigenvectors, Gini Impurity, True, OLS" + "\n")
    text_file.write("   Thresholds = Mean, Mean, Cross Validation, All" + "\n")
    text_file.write("   Outcome = quant, 2020 VBP Adjustment Factor" + "\n")
    text_file.write(str(fp_X_a) + "\n\n") # Feature table for X_a
    text_file.write(d + "\n") # Label for the X_d pipeline
    text_file.write("   Models = " + m6 + "\n") # BUG FIX: the "+" between the string and m6 was missing (SyntaxError)
    text_file.write("   Values = OLS" + "\n")
    text_file.write("   Thresholds = All" + "\n")
    text_file.write("   Outcome = quant, 2020 VBP Adjustment Factor" + "\n")
    text_file.write(str(fp_X_d) + "\n\n") # Feature table for X_d
    text_file.write(c + "\n") # Label for the X_c pipeline
    text_file.write("   Models = " + m3 + m4 + m5 + m6 + "\n")
    text_file.write("   Values = Eigenvectors, Gini Impurity, True, OLS" + "\n")
    text_file.write("   Thresholds = Mean, Mean, Cross Validation, All" + "\n")
    text_file.write("   Outcome = quant, 2020 VBP Adjustment Factor" + "\n")
    text_file.write(str(fp_X_c) + "\n\n") # Feature table for X_c
    text_file.write(g + "\n") # Label for the X_g pipeline
    text_file.write("   Models = " + m3 + m4 + m5 + m6 + "\n")
    text_file.write("   Values = Eigenvectors, Gini Impurity, Rank, OLS" + "\n")
    text_file.write("   Thresholds = Mean, Mean, Cross Validation, All" + "\n")
    text_file.write("   Outcome = quant, 2020 VBP Adjustment Factor" + "\n")
    text_file.write(str(fp_X_g) + "\n\n") # Feature table for X_g
    text_file.write("####################" + "\n\n") # Section divider
# Step 2: Final Prediction with Open and Closed Box Models
sub4 = "Step 2: Final Prediction with Open and Closed Box Models" # Section header for the results file
f = "Final Model of Identified Predictors" # Label reused for both final models below
## Isolate Identified Predictors from Raw Data for Final Models
### Pull final feature list from raw data
f_a = fp_X_a["Features"].tolist() # Selected feature names per pipeline
f_d = fp_X_d["Features"].tolist()
f_c = fp_X_c["Features"].tolist()
f_g = fp_X_g["Features"].tolist()
X_f_a = X_a_raw[f_a] # Raw (unscaled) columns for the selected features
X_f_d = X_d_raw[f_d]
X_f_c = X_c_raw[f_c]
X_f_g = X_g_raw[f_g]
### Join raw predictors and raw outcome data using Facility ID index
X_f_a = X_f_a.reset_index() # Expose "Facility ID" as a join column
X_f_d = X_f_d.reset_index()
X_f_c = X_f_c.reset_index()
X_f_g = X_f_g.reset_index()
X_i = pd.merge(X_f_a, X_f_d, on = "Facility ID", how = "inner") # Inner join of the a and d feature sets
X_e = pd.merge(X_f_g, X_f_c, on = "Facility ID", how = "inner") # Inner join of the g and c feature sets
X_f = pd.merge(X_e, X_i, on = "Facility ID", how = "inner") # Combine into one predictor frame
Y_f = Y_raw.reset_index() # Expose "Facility ID" on the outcome frame
df_f = pd.merge(Y_f, X_f, on = "Facility ID", how = "outer") # Outer join keeps facilities present in either frame; rows with gaps are dropped below
df_f = df_f.set_index("Facility ID") # Set column as index
### Tidy for Multiple Regression, Scale for Multi-layer Perceptron
df_f = df_f.drop(columns = ["FIPS"]) # Drop unwanted geography column
df_f = df_f.dropna() # Remove rows with any missing values
ss_f = pd.DataFrame(StandardScaler().fit_transform(df_f.values), columns = df_f.columns) # Standard-scale all columns back into a labeled DataFrame (index resets to 0..n-1)
## Compare Final Predictors with Regression Model
### Build Regression Model
Y = df_f.filter(["quant"]) # Outcome column
X = df_f.drop(columns = ["quant", "train", "test", "test2"]) # Predictors only
mod = sm.OLS(Y, X) # NOTE(review): no constant term is added (sm.add_constant) — confirm an intercept-free model is intended
res_f = mod.fit() # Fit model
## Multi-Layer Perceptron for Identified Final Predictors
### Build Network with keras Sequential API
# Prep inputs: per-year target columns and the scaled feature matrix
Y_train = ss_f.filter(["train"])
Y_test = ss_f.filter(["test"])
Y_test2 = ss_f.filter(["test2"])
X = ss_f.drop(columns = ["quant", "train", "test", "test2"])
n_inputs = X.shape[1] # Number of feature columns (renamed: "input" shadowed the builtin)
nodes = round(n_inputs / 2) # Half the input dimension for nodes in each hidden layer
network = Sequential()
# Dense Layers
network.add(Dense(nodes, activation = 'relu', kernel_initializer = 'random_normal', input_dim = n_inputs)) # First Hidden Layer
network.add(Dense(nodes, activation = 'relu', kernel_initializer = 'random_normal')) # Second Hidden Layer
# Activation Layer
network.add(Dense(1, activation = 'sigmoid', kernel_initializer = 'random_normal')) # Output Layer
# Compile
network.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy']) # ADAM optimizer ("Adaptive moment estimation")
# Fit
network.fit(X, Y_train, batch_size = 10, epochs = 100) # Fitting the data to the train outcome
# Predict
Y_f = network.predict(X) # Predicted probabilities from the fitted network
# AUC Score
Y_pred = (Y_f > 0.5) # Binarize predicted probabilities
Y_train = (Y_train > 0) # Binarize the standardized targets
Y_test = (Y_test > 0) # BUG FIX: roc_curve requires binary labels; test targets were previously passed in continuous
Y_test2 = (Y_test2 > 0) # BUG FIX: same for test2
fpr, tpr, threshold = roc_curve(Y_train, Y_pred) # ROC against the training-year target
f_train = auc(fpr, tpr) # AUC score
fpr, tpr, threshold = roc_curve(Y_test, Y_pred) # ROC against the first test-year target
f_test = auc(fpr, tpr) # AUC score
fpr, tpr, threshold = roc_curve(Y_test2, Y_pred) # ROC against the second test-year target
f_test2 = auc(fpr, tpr) # AUC score
### Append to Text File
# Report the final OLS model and the MLP's AUC scores. The context manager
# guarantees the handle is closed even if a write fails.
with open(path + day + "_results" + label + ".txt", "a") as text_file: # Append final-model results
    text_file.write(sub4 + "\n\n") # Section header followed by a blank line
    text_file.write(f + "\n") # Final regression model label
    text_file.write("   Models = " + m6 + "\n") # BUG FIX: the "+" between the string and m6 was missing (SyntaxError)
    text_file.write("   Outcome = quant, 2020 VBP Adjustment Factor" + "\n")
    text_file.write(str(res_f.summary()) + "\n\n") # statsmodels OLS summary table
    text_file.write(f + "\n") # Final MLP model label
    text_file.write("   Network = " + m1 + "\n")
    text_file.write("   Layers = Dense, Dense, Activation" + "\n")
    text_file.write("   Functions = ReLU, ReLU, Sigmoid" + "\n")
    text_file.write("   Epochs = 100" + "\n")
    text_file.write("   Targets = (train, test, test2), (FY2020, FY2019, FY2018)" + "\n")
    text_file.write("   AUC Scores" + "\n")
    text_file.write("      train = " + str(f_train) + "\n")
    text_file.write("      test = " + str(f_test) + "\n")
    text_file.write("      test2 = " + str(f_test2) + "\n\n") # BUG FIX: the label previously read "test" for the test2 score
    text_file.write("####################" + "\n\n") # Section divider
| 70.460857
| 205
| 0.724875
| 7,759
| 47,702
| 4.333548
| 0.076427
| 0.032358
| 0.045236
| 0.032477
| 0.854033
| 0.835296
| 0.814478
| 0.808381
| 0.801213
| 0.780276
| 0
| 0.013201
| 0.166282
| 47,702
| 677
| 206
| 70.460857
| 0.832261
| 0.464048
| 0
| 0.446184
| 0
| 0.003914
| 0.256118
| 0.007169
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.046967
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d22eea61f13c855c668b0542247e1507004c2af4
| 212
|
py
|
Python
|
musicmore/core/views.py
|
IPenuelas/musicmore_r01
|
94726c84aa61201940916feb4d5e535bcd4dc230
|
[
"MIT"
] | null | null | null |
musicmore/core/views.py
|
IPenuelas/musicmore_r01
|
94726c84aa61201940916feb4d5e535bcd4dc230
|
[
"MIT"
] | null | null | null |
musicmore/core/views.py
|
IPenuelas/musicmore_r01
|
94726c84aa61201940916feb4d5e535bcd4dc230
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, HttpResponse
# Create your views here.
def home(request):
    """Render the site home page."""
    return render(request, "core/home.html")
| 30.285714
| 49
| 0.740566
| 29
| 212
| 5.413793
| 0.586207
| 0.127389
| 0.191083
| 0.242038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146226
| 212
| 7
| 50
| 30.285714
| 0.867403
| 0.410377
| 0
| 0
| 0
| 0
| 0.114754
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
d23b75d3e7e8181f4dd49e5a617f6e93dbd21c58
| 12,157
|
py
|
Python
|
tests/test_metric_full.py
|
theodumont/client
|
7402ac67ada5bc8078078a49fd3e0cb4b6172307
|
[
"MIT"
] | 3,968
|
2017-08-23T21:27:19.000Z
|
2022-03-31T22:00:19.000Z
|
tests/test_metric_full.py
|
theodumont/client
|
7402ac67ada5bc8078078a49fd3e0cb4b6172307
|
[
"MIT"
] | 2,725
|
2017-04-17T00:29:15.000Z
|
2022-03-31T21:01:53.000Z
|
tests/test_metric_full.py
|
theodumont/client
|
7402ac67ada5bc8078078a49fd3e0cb4b6172307
|
[
"MIT"
] | 351
|
2018-04-08T19:39:34.000Z
|
2022-03-30T19:38:08.000Z
|
"""
metric full tests.
"""
import math
import numpy as np
import six
import wandb
from wandb.proto import wandb_telemetry_pb2 as tpb
def test_metric_default(live_mock_server, test_settings, parse_ctx):
    """Without define_metric, summary holds the last logged value of each key."""
    run = wandb.init(settings=test_settings)
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=2, val=8))
    run.log(dict(mystep=3, val=3))
    run.log(dict(val2=4))
    run.log(dict(val2=1))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    # by default we use last value
    summary = ctx_util.summary
    assert six.viewitems(dict(val=3, val2=1)) <= six.viewitems(summary)
def test_metric_copy(live_mock_server, test_settings, parse_ctx):
    """Glob summary="copy" copies the last value of every metric (including mystep) into summary."""
    run = wandb.init(settings=test_settings)
    run.define_metric("*", summary="copy")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=2, val=8))
    run.log(dict(mystep=3, val=3))
    run.log(dict(val2=4))
    run.log(dict(val2=1))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert six.viewitems(dict(val=3, val2=1, mystep=3)) <= six.viewitems(summary)
def test_metric_glob_none(live_mock_server, test_settings, parse_ctx):
    """A per-metric summary="none" overrides the glob "copy", so val is excluded from summary."""
    run = wandb.init(settings=test_settings)
    run.define_metric("*", summary="copy")
    run.define_metric("val", summary="none")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=2, val=8))
    run.log(dict(mystep=3, val=3))
    run.log(dict(val2=4))
    run.log(dict(val2=1))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert six.viewitems(dict(val2=1, mystep=3)) <= six.viewitems(summary)
    assert "val" not in summary
def test_metric_glob(live_mock_server, test_settings, parse_ctx):
    """A glob step_metric still records the metric's value in summary."""
    run = wandb.init(settings=test_settings)
    run.define_metric("*", step_metric="mystep")
    run.log(dict(mystep=1, val=2))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert six.viewitems(dict(val=2)) <= six.viewitems(summary)
def test_metric_nosummary(live_mock_server, test_settings, parse_ctx):
    """define_metric without a summary arg keeps the default last-value behavior."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val")
    run.log(dict(val2=4))
    run.log(dict(val2=1))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert six.viewitems(dict(val2=1)) <= six.viewitems(summary)
def test_metric_none(live_mock_server, test_settings, parse_ctx):
    """summary="none" removes the metric from summary entirely."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val2", summary="none")
    run.log(dict(val2=4))
    run.log(dict(val2=1))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert "val2" not in summary
def test_metric_sum_none(live_mock_server, test_settings, parse_ctx):
    """Defining a metric without disabling summary still records last values."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=1, val=8))
    run.log(dict(mystep=1, val=3))
    run.log(dict(val2=4))
    run.log(dict(val2=1))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    # if we set a metric, last is NOT disabled
    assert six.viewitems(dict(val=3, val2=1)) <= six.viewitems(summary)
def test_metric_max(live_mock_server, test_settings, parse_ctx):
    """summary="max" tracks the maximum, visible both live on run.summary and server-side."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="max")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=1, val=8))
    run.log(dict(mystep=1, val=3))
    assert run.summary.get("val") and run.summary["val"].get("max") == 8
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("val", {}).get("max") == 8
def test_metric_min(live_mock_server, test_settings, parse_ctx):
    """summary="min" tracks the minimum, visible both live on run.summary and server-side."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="min")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=1, val=8))
    run.log(dict(mystep=1, val=3))
    assert run.summary.get("val") and run.summary["val"].get("min") == 2
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("val", {}).get("min") == 2
def test_metric_last(live_mock_server, test_settings, parse_ctx):
    """summary="last" stores the final value under a nested "last" key."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="last")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=1, val=8))
    run.log(dict(mystep=1, val=3))
    assert run.summary.get("val") and run.summary["val"].get("last") == 3
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("val", {}).get("last") == 3
def _gen_metric_sync_step(run):
    """Log a val/val2 sequence where some rows lack mystep, then finish the run."""
    run.log(dict(val=2, val2=5, mystep=1))
    run.log(dict(mystep=3))
    run.log(dict(val=8))
    run.log(dict(val2=8))
    run.log(dict(val=3, mystep=5))
    run.finish()
def test_metric_no_sync_step(live_mock_server, test_settings, parse_ctx):
    """With step_sync=False, rows logged without mystep keep a None step."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="min", step_metric="mystep", step_sync=False)
    _gen_metric_sync_step(run)
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    history = ctx_util.history
    metrics = ctx_util.metrics
    assert summary.get("val", {}).get("min") == 2
    history_val = [(h.get("val"), h.get("mystep")) for h in history if "val" in h]
    assert history_val == [(2, 1), (8, None), (3, 5)]
    assert metrics and len(metrics) == 2
def test_metric_sync_step(live_mock_server, test_settings, parse_ctx):
    """With step_sync=True, rows without mystep inherit the latest step value."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="min", step_metric="mystep", step_sync=True)
    _gen_metric_sync_step(run)
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    history = ctx_util.history
    telemetry = ctx_util.telemetry
    metrics = ctx_util.metrics
    assert summary.get("val", {}).get("min") == 2
    history_val = [(h.get("val"), h.get("mystep")) for h in history if "val" in h]
    assert history_val == [(2, 1), (8, 3), (3, 5)]
    assert not any([item[1] is None for item in history_val])
    # metric in telemetry options
    assert telemetry and 7 in telemetry.get("3", [])
    assert metrics and len(metrics) == 2
def test_metric_mult(live_mock_server, test_settings, parse_ctx):
    """A hidden step metric plus a glob definition yields three metric records."""
    run = wandb.init(settings=test_settings)
    run.define_metric("mystep", hide=True)
    run.define_metric("*", step_metric="mystep")
    _gen_metric_sync_step(run)
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    metrics = ctx_util.metrics
    assert metrics and len(metrics) == 3
def test_metric_goal(live_mock_server, test_settings, parse_ctx):
    """Adding goal="maximize" to a glob definition still yields three metric records."""
    run = wandb.init(settings=test_settings)
    run.define_metric("mystep", hide=True)
    run.define_metric("*", step_metric="mystep", goal="maximize")
    _gen_metric_sync_step(run)
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    metrics = ctx_util.metrics
    assert metrics and len(metrics) == 3
def test_metric_nan_mean(live_mock_server, test_settings, parse_ctx):
    """NaN values are ignored when computing the "mean" summary: mean(2, 4) == 3."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="mean")
    run.log(dict(mystep=1, val=2))
    run.log(dict(mystep=1, val=float("nan")))
    run.log(dict(mystep=1, val=4))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("val", {}).get("mean") == 3
def test_metric_nan_min_norm(live_mock_server, test_settings, parse_ctx):
    """If only NaN is logged, no "min" entry is produced."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="min")
    run.log(dict(mystep=1, val=float("nan")))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert "min" not in summary.get("val", {})
def test_metric_nan_min_more(live_mock_server, test_settings, parse_ctx):
    """NaN values are skipped for "min"; the only finite value (4) wins."""
    run = wandb.init(settings=test_settings)
    run.define_metric("val", summary="min")
    run.log(dict(mystep=1, val=float("nan")))
    run.log(dict(mystep=1, val=4))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("val", {}).get("min") == 4
def test_metric_nested_default(live_mock_server, test_settings, parse_ctx):
    """Nested dict metrics default to last-value summary."""
    run = wandb.init(settings=test_settings)
    run.log(dict(this=dict(that=3)))
    run.log(dict(this=dict(that=2)))
    run.log(dict(this=dict(that=4)))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("this", {}).get("that", {}) == 4
def test_metric_nested_copy(live_mock_server, test_settings, parse_ctx):
    """A dotted define_metric path with summary="copy" copies the nested last value."""
    run = wandb.init(settings=test_settings)
    run.define_metric("this.that", summary="copy")
    run.log(dict(this=dict(that=3)))
    run.log(dict(this=dict(that=2)))
    run.log(dict(this=dict(that=4)))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("this", {}).get("that", {}) == 4
def test_metric_nested_min(live_mock_server, test_settings, parse_ctx):
    """A dotted define_metric path with summary="min" tracks the nested minimum."""
    run = wandb.init(settings=test_settings)
    run.define_metric("this.that", summary="min")
    run.log(dict(this=dict(that=3)))
    run.log(dict(this=dict(that=2)))
    run.log(dict(this=dict(that=4)))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    assert summary.get("this", {}).get("that", {}).get("min") == 2
def test_metric_nested_mult(live_mock_server, test_settings, parse_ctx):
    """A comma-separated summary ("min,max") produces both nested summary entries."""
    run = wandb.init(settings=test_settings)
    run.define_metric("this.that", summary="min,max")
    run.log(dict(this=dict(that=3)))
    run.log(dict(this=dict(that=2)))
    run.log(dict(this=dict(that=4)))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    metrics = ctx_util.metrics
    assert summary.get("this", {}).get("that", {}).get("min") == 2
    assert summary.get("this", {}).get("that", {}).get("max") == 4
    assert len(metrics) == 1
    # keys are serialized proto field numbers — presumably 1=name, 7=summary ops, 6=options; TODO confirm against wandb proto
    assert metrics[0] == {"1": "this.that", "7": [1, 2], "6": [3]}
def test_metric_dotted(live_mock_server, test_settings, parse_ctx):
    """An escaped dotted define_metric name ("this\\.that") matches a literal dotted metric key."""
    run = wandb.init(settings=test_settings)
    run.define_metric("this\\.that", summary="min")
    run.log({"this.that": 3})
    run.log({"this.that": 2})
    run.log({"this.that": 4})
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    metrics = ctx_util.metrics
    assert summary.get("this.that", {}).get("min") == 2
    assert len(metrics) == 1
    assert metrics[0] == {"1": "this\\.that", "7": [1], "6": [3]}
def test_metric_nested_glob(live_mock_server, test_settings, parse_ctx):
    """A glob definition with "min,max" applies to nested metrics as well."""
    run = wandb.init(settings=test_settings)
    run.define_metric("*", summary="min,max")
    run.log(dict(this=dict(that=3)))
    run.log(dict(this=dict(that=2)))
    run.log(dict(this=dict(that=4)))
    run.finish()
    ctx_util = parse_ctx(live_mock_server.get_ctx())
    summary = ctx_util.summary
    metrics = ctx_util.metrics
    assert summary.get("this", {}).get("that", {}).get("min") == 2
    assert summary.get("this", {}).get("that", {}).get("max") == 4
    assert len(metrics) == 1
    assert metrics[0] == {"1": "this.that", "7": [1, 2]}
def test_metric_debouncing(live_mock_server, test_settings):
    """Config updates from many defined metrics are debounced into few upserts."""
    # addresses WB-5424
    run = wandb.init(settings=test_settings)
    run.define_metric("*", summary="min,max")
    # test many defined metrics logged at once
    log_arg = {str(i): i for i in range(100)}
    run.log(log_arg)
    # and serially
    for i in range(100, 200):
        run.log({str(i): i})
    run.finish()
    ctx = live_mock_server.get_ctx()
    # without debouncing, the number of config updates should be ~200, one for each defined metric.
    # with debouncing, the number should be << 12 (the minimum number of debounce loops to exceed the
    # 60s test timeout at a 5s debounce interval)
    assert ctx["upsert_bucket_count"] <= 12
| 34.635328
| 101
| 0.679279
| 1,898
| 12,157
| 4.143309
| 0.071654
| 0.049593
| 0.076297
| 0.059003
| 0.876017
| 0.843845
| 0.823499
| 0.812436
| 0.805824
| 0.794761
| 0
| 0.020031
| 0.162293
| 12,157
| 350
| 102
| 34.734286
| 0.75216
| 0.039154
| 0
| 0.707407
| 0
| 0
| 0.042024
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 1
| 0.092593
| false
| 0
| 0.018519
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d26e95d496f2b5851d67e9ed03d8a6328ef3bc86
| 303
|
py
|
Python
|
src/NetworkSimulator/NetworkSimulator/__init__.py
|
zawaki/nara_revision
|
28bb42f7ca3a768075748d258c405addc7b28c31
|
[
"MIT"
] | null | null | null |
src/NetworkSimulator/NetworkSimulator/__init__.py
|
zawaki/nara_revision
|
28bb42f7ca3a768075748d258c405addc7b28c31
|
[
"MIT"
] | null | null | null |
src/NetworkSimulator/NetworkSimulator/__init__.py
|
zawaki/nara_revision
|
28bb42f7ca3a768075748d258c405addc7b28c31
|
[
"MIT"
] | null | null | null |
from NetworkSimulator.GlobalMessageBuffer import *
from NetworkSimulator.utilz import *
from NetworkSimulator.Network import *
from NetworkSimulator.NetworkGenerator import *
from NetworkSimulator.NetworkManager import *
from NetworkSimulator.Components import *
from NetworkSimulator.Requests import *
| 37.875
| 50
| 0.861386
| 28
| 303
| 9.321429
| 0.357143
| 0.536398
| 0.597701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092409
| 303
| 7
| 51
| 43.285714
| 0.949091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d2724258252a04e1de262ac67c4a721c374e8b28
| 10,934
|
py
|
Python
|
python/seldon_deploy_sdk/api/kubernetes_resources_api.py
|
SachinVarghese/seldon-deploy-sdk
|
2c70e249c084f113a998ab876c29843ae5f6a99a
|
[
"Apache-2.0"
] | 6
|
2021-02-18T14:37:54.000Z
|
2022-01-13T13:27:43.000Z
|
python/seldon_deploy_sdk/api/kubernetes_resources_api.py
|
SachinVarghese/seldon-deploy-sdk
|
2c70e249c084f113a998ab876c29843ae5f6a99a
|
[
"Apache-2.0"
] | 14
|
2021-01-04T16:32:03.000Z
|
2021-12-13T17:53:59.000Z
|
python/seldon_deploy_sdk/api/kubernetes_resources_api.py
|
SachinVarghese/seldon-deploy-sdk
|
2c70e249c084f113a998ab876c29843ae5f6a99a
|
[
"Apache-2.0"
] | 7
|
2021-03-17T09:05:55.000Z
|
2022-01-05T10:39:56.000Z
|
# coding: utf-8
"""
Seldon Deploy API
API to interact and manage the lifecycle of your machine learning models deployed through Seldon Deploy. # noqa: E501
OpenAPI spec version: v1alpha1
Contact: hello@seldon.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from seldon_deploy_sdk.api_client import ApiClient
class KubernetesResourcesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def list_inference_service_resources(self, name, namespace, **kwargs):  # noqa: E501
    """list_inference_service_resources  # noqa: E501

    list objects of kind resource for Inference Service  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_inference_service_resources(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Name identifies a resource (required)
    :param str namespace: Namespace provides a logical grouping of resources (required)
    :param str component: Component differentiates between types of model (e.g. predictor, explainer... etc)
    :return: list[Component]
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by swagger-codegen — regenerate rather than hand-edit.
    # Force the ..._with_http_info helper to return only the response body.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread for the caller to .get()
        return self.list_inference_service_resources_with_http_info(name, namespace, **kwargs)  # noqa: E501
    else:
        # Synchronous path: block and return the deserialized response data
        (data) = self.list_inference_service_resources_with_http_info(name, namespace, **kwargs)  # noqa: E501
        return data
def list_inference_service_resources_with_http_info(self, name, namespace, **kwargs): # noqa: E501
"""list_inference_service_resources # noqa: E501
list objects of kind resource for Inference Service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_inference_service_resources_with_http_info(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: Name identifies a resource (required)
:param str namespace: Namespace provides a logical grouping of resources (required)
:param str component: Component differentiates between types of model (e.g. predictor, explainer... etc)
:return: list[Component]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'component'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_inference_service_resources" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `list_inference_service_resources`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_inference_service_resources`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'component' in params:
query_params.append(('component', params['component'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/namespaces/{namespace}/inferenceservices/{name}/resources', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Component]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_seldon_deployment_resources(self, name, namespace, **kwargs): # noqa: E501
"""list_seldon_deployment_resources # noqa: E501
list objects of kind resource for Seldon Deployment # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_seldon_deployment_resources(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: Name identifies a resource (required)
:param str namespace: Namespace provides a logical grouping of resources (required)
:param str component: Component differentiates between types of model (e.g. predictor, explainer... etc)
:return: list[Component]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_seldon_deployment_resources_with_http_info(name, namespace, **kwargs) # noqa: E501
else:
(data) = self.list_seldon_deployment_resources_with_http_info(name, namespace, **kwargs) # noqa: E501
return data
def list_seldon_deployment_resources_with_http_info(self, name, namespace, **kwargs): # noqa: E501
"""list_seldon_deployment_resources # noqa: E501
list objects of kind resource for Seldon Deployment # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_seldon_deployment_resources_with_http_info(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: Name identifies a resource (required)
:param str namespace: Namespace provides a logical grouping of resources (required)
:param str component: Component differentiates between types of model (e.g. predictor, explainer... etc)
:return: list[Component]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'component'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_seldon_deployment_resources" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `list_seldon_deployment_resources`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_seldon_deployment_resources`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'component' in params:
query_params.append(('component', params['component'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/namespaces/{namespace}/seldondeployments/{name}/resources', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Component]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 42.544747
| 136
| 0.638467
| 1,248
| 10,934
| 5.379808
| 0.145032
| 0.04647
| 0.032767
| 0.047513
| 0.913018
| 0.906166
| 0.906166
| 0.89008
| 0.877867
| 0.877867
| 0
| 0.015981
| 0.273185
| 10,934
| 256
| 137
| 42.710938
| 0.828866
| 0.37205
| 0
| 0.793893
| 0
| 0
| 0.212054
| 0.070685
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038168
| false
| 0
| 0.030534
| 0
| 0.122137
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d282d2b1f15590cb531a6b105adc1259e732c399
| 791
|
py
|
Python
|
rastervision_core/rastervision/core/data/raster_transformer/__init__.py
|
khlaifiabilel/raster-vision
|
864d36df901eff7a7d218b0511b7afa682eb28f2
|
[
"Apache-2.0"
] | null | null | null |
rastervision_core/rastervision/core/data/raster_transformer/__init__.py
|
khlaifiabilel/raster-vision
|
864d36df901eff7a7d218b0511b7afa682eb28f2
|
[
"Apache-2.0"
] | 70
|
2018-12-21T15:38:04.000Z
|
2022-03-01T13:01:47.000Z
|
rastervision_core/rastervision/core/data/raster_transformer/__init__.py
|
jamesmcclain/raster-vision
|
597c196e9fa0b66163ab9049645134b4962e7456
|
[
"Apache-2.0"
] | 1
|
2022-01-25T09:13:23.000Z
|
2022-01-25T09:13:23.000Z
|
# flake8: noqa
from rastervision.core.data.raster_transformer.raster_transformer import *
from rastervision.core.data.raster_transformer.raster_transformer_config import *
from rastervision.core.data.raster_transformer.stats_transformer import *
from rastervision.core.data.raster_transformer.stats_transformer_config import *
from rastervision.core.data.raster_transformer.nan_transformer import *
from rastervision.core.data.raster_transformer.nan_transformer_config import *
from rastervision.core.data.raster_transformer.cast_transformer import *
from rastervision.core.data.raster_transformer.cast_transformer_config import *
from rastervision.core.data.raster_transformer.reclass_transformer import *
from rastervision.core.data.raster_transformer.reclass_transformer_config import *
| 60.846154
| 82
| 0.878635
| 97
| 791
| 6.907216
| 0.14433
| 0.304478
| 0.298507
| 0.358209
| 0.967164
| 0.967164
| 0.967164
| 0.967164
| 0.801493
| 0
| 0
| 0.001339
| 0.055626
| 791
| 12
| 83
| 65.916667
| 0.895582
| 0.015171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
963e82e9676bef13b0542f785d246df6ca7c5d9d
| 13,323
|
py
|
Python
|
tests/benchmark/scenarios/neutron/test_network.py
|
kambiz-aghaiepour/rally
|
be708bacf0bc898a9538b9b6cb0ba4e1c015c1f2
|
[
"Apache-2.0"
] | 1
|
2020-09-09T19:01:41.000Z
|
2020-09-09T19:01:41.000Z
|
tests/benchmark/scenarios/neutron/test_network.py
|
kambiz-aghaiepour/rally
|
be708bacf0bc898a9538b9b6cb0ba4e1c015c1f2
|
[
"Apache-2.0"
] | null | null | null |
tests/benchmark/scenarios/neutron/test_network.py
|
kambiz-aghaiepour/rally
|
be708bacf0bc898a9538b9b6cb0ba4e1c015c1f2
|
[
"Apache-2.0"
] | 1
|
2020-09-09T19:01:43.000Z
|
2020-09-09T19:01:43.000Z
|
# Copyright 2014: Intel Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.benchmark.scenarios.neutron import network
from tests import test
NEUTRON_NETWORKS = "rally.benchmark.scenarios.neutron.network.NeutronNetworks"
class NeutronNetworksTestCase(test.TestCase):
    """Mock-based unit tests for the NeutronNetworks benchmark scenarios."""

    @mock.patch(NEUTRON_NETWORKS + "._list_networks")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    def test_create_and_list_networks(self, mock_create, mock_list):
        neutron_scenario = network.NeutronNetworks()

        # Default options
        network_create_args = {}
        neutron_scenario.create_and_list_networks(
            network_create_args=network_create_args)
        mock_create.assert_called_once_with(network_create_args)
        mock_list.assert_called_once_with()

        mock_create.reset_mock()
        mock_list.reset_mock()

        # Explicit network name is specified
        network_create_args = {"name": "given-name"}
        neutron_scenario.create_and_list_networks(
            network_create_args=network_create_args)
        mock_create.assert_called_once_with(network_create_args)
        mock_list.assert_called_once_with()

    @mock.patch(NEUTRON_NETWORKS + "._delete_network")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    def test_create_and_delete_networks(self, mock_create, mock_delete):
        neutron_scenario = network.NeutronNetworks()

        # Default options
        network_create_args = {}
        neutron_scenario.create_and_delete_networks()
        mock_create.assert_called_once_with(network_create_args)
        # NOTE(review): Mock.assert_called_once() only exists on newer mock
        # releases; on older ones this is a silent no-op (it just creates a
        # child mock attribute). Confirm the pinned mock version, or assert
        # on mock_delete.call_count instead.
        mock_delete.assert_called_once()

        mock_create.reset_mock()
        mock_delete.reset_mock()

        # Explicit network name is specified
        network_create_args = {"name": "given-name"}
        neutron_scenario.create_and_delete_networks(
            network_create_args=network_create_args)
        mock_create.assert_called_once_with(network_create_args)
        mock_delete.assert_called_once()

    @mock.patch(NEUTRON_NETWORKS + "._list_subnets")
    @mock.patch(NEUTRON_NETWORKS + "._create_subnet")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    @mock.patch(NEUTRON_NETWORKS + ".SUBNET_CIDR_START",
                new_callable=mock.PropertyMock(return_value="default_cidr"))
    def test_create_and_list_subnets(self,
                                     mock_cidr_start,
                                     mock_create_network,
                                     mock_create_subnet,
                                     mock_list):
        scenario = network.NeutronNetworks()
        mock_create_network.return_value = {"network": {"id": "fake-id"}}
        subnets_per_network = 4

        # subnets_per_network has no default, so calling without it must fail
        self.assertRaises(TypeError, scenario.create_and_list_subnets)
        mock_create_network.reset_mock()
        mock_create_subnet.reset_mock()
        mock_list.reset_mock()

        # Default options
        scenario.create_and_list_subnets(
            subnets_per_network=subnets_per_network)
        mock_create_network.assert_called_once_with({})
        self.assertEqual(mock_create_subnet.mock_calls,
                         [mock.call({"network": {"id": "fake-id"}},
                                    {})] * subnets_per_network)
        mock_list.assert_called_once_with()
        self.assertEqual(scenario.SUBNET_CIDR_START, "default_cidr")

        mock_create_network.reset_mock()
        mock_create_subnet.reset_mock()
        mock_list.reset_mock()

        # Custom options
        scenario.create_and_list_subnets(
            subnet_create_args={"allocation_pools": []},
            subnet_cidr_start="custom_cidr",
            subnets_per_network=subnets_per_network)
        self.assertEqual(scenario.SUBNET_CIDR_START, "custom_cidr")
        mock_create_network.assert_called_once_with({})
        self.assertEqual(
            mock_create_subnet.mock_calls,
            [mock.call({"network": {"id": "fake-id"}},
                       {"allocation_pools": []})] * subnets_per_network)
        mock_list.assert_called_once_with()

    @mock.patch(NEUTRON_NETWORKS + "._delete_subnet")
    @mock.patch(NEUTRON_NETWORKS + "._create_subnet")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    @mock.patch(NEUTRON_NETWORKS + ".SUBNET_CIDR_START",
                new_callable=mock.PropertyMock(return_value="default_cidr"))
    def test_create_and_delete_subnets(self,
                                       mock_cidr_start,
                                       mock_create_network,
                                       mock_create_subnet,
                                       mock_delete):
        scenario = network.NeutronNetworks()
        mock_create_network.return_value = {"network": {"id": "fake-id"}}
        subnets_per_network = 4

        # subnets_per_network has no default, so calling without it must fail
        self.assertRaises(TypeError, scenario.create_and_delete_subnets)
        mock_create_network.reset_mock()
        mock_create_subnet.reset_mock()
        mock_delete.reset_mock()

        # Default options
        scenario.create_and_delete_subnets(
            subnets_per_network=subnets_per_network)
        mock_create_network.assert_called_once_with({})
        self.assertEqual(mock_create_subnet.mock_calls,
                         [mock.call({"network": {"id": "fake-id"}},
                                    {})] * subnets_per_network)
        mock_delete.assert_called_once()
        self.assertEqual(scenario.SUBNET_CIDR_START, "default_cidr")

        mock_create_network.reset_mock()
        mock_create_subnet.reset_mock()
        mock_delete.reset_mock()

        # Custom options
        scenario.create_and_delete_subnets(
            subnet_create_args={"allocation_pools": []},
            subnet_cidr_start="custom_cidr",
            subnets_per_network=subnets_per_network)
        self.assertEqual(scenario.SUBNET_CIDR_START, "custom_cidr")
        mock_create_network.assert_called_once_with({})
        self.assertEqual(
            mock_create_subnet.mock_calls,
            [mock.call({"network": {"id": "fake-id"}},
                       {"allocation_pools": []})] * subnets_per_network)
        mock_delete.assert_called_once()

    @mock.patch(NEUTRON_NETWORKS + "._list_routers")
    @mock.patch(NEUTRON_NETWORKS + "._create_router")
    @mock.patch(NEUTRON_NETWORKS + "._create_subnet")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    @mock.patch(NEUTRON_NETWORKS + ".clients")
    def test_create_and_list_routers(self,
                                     mock_clients,
                                     mock_create_network,
                                     mock_create_subnet,
                                     mock_create_router,
                                     mock_list):
        scenario = network.NeutronNetworks()
        subnets_per_network = 4
        mock_clients("neutron").add_interface_router = mock.Mock()

        net = {"network": {"id": "network-id"}}
        mock_create_network.return_value = net
        subnet = {"subnet": {"name": "subnet-name", "id": "subnet-id"}}
        mock_create_subnet.return_value = subnet
        router = {"router": {"name": "router-name", "id": "router-id"}}
        mock_create_router.return_value = router

        # Default options
        scenario.create_and_list_routers(
            subnets_per_network=subnets_per_network)
        mock_create_network.assert_called_once_with({})
        self.assertEqual(
            mock_create_subnet.mock_calls,
            [mock.call(net, {})] * subnets_per_network)
        self.assertEqual(
            mock_create_router.mock_calls,
            [mock.call({})] * subnets_per_network)
        # Each created subnet must be wired to its router.
        self.assertEqual(
            mock_clients("neutron").add_interface_router.mock_calls,
            [mock.call(router["router"]["id"],
                       {"subnet_id": subnet["subnet"]["id"]})
             ] * subnets_per_network)

        mock_create_network.reset_mock()
        mock_create_subnet.reset_mock()
        mock_create_router.reset_mock()
        mock_clients("neutron").add_interface_router.reset_mock()
        mock_list.reset_mock()

        # Custom options
        subnet_create_args = {"allocation_pools": []}
        router_create_args = {"admin_state_up": False}
        scenario.create_and_list_routers(
            subnet_create_args=subnet_create_args,
            subnet_cidr_start="custom_cidr",
            subnets_per_network=subnets_per_network,
            router_create_args=router_create_args)
        self.assertEqual(scenario.SUBNET_CIDR_START, "custom_cidr")
        mock_create_network.assert_called_once_with({})
        self.assertEqual(
            mock_create_subnet.mock_calls, [
                mock.call({"network": {"id": "network-id"}},
                          subnet_create_args)
            ] * subnets_per_network)
        self.assertEqual(
            mock_create_router.mock_calls, [
                mock.call(router_create_args)
            ] * subnets_per_network)
        self.assertEqual(
            mock_clients("neutron").add_interface_router.mock_calls, [
                mock.call(router["router"]["id"],
                          {"subnet_id": subnet["subnet"]["id"]})
            ] * subnets_per_network)
        mock_list.assert_called_once_with()

    @mock.patch(NEUTRON_NETWORKS + "._generate_random_name")
    @mock.patch(NEUTRON_NETWORKS + "._list_ports")
    @mock.patch(NEUTRON_NETWORKS + "._create_port")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    def test_create_and_list_ports(self,
                                   mock_create_network,
                                   mock_create_port,
                                   mock_list,
                                   mock_random_name):
        scenario = network.NeutronNetworks()
        mock_random_name.return_value = "random-name"
        net = {"network": {"id": "fake-id"}}
        mock_create_network.return_value = net
        ports_per_network = 10

        # ports_per_network has no default, so calling without it must fail
        self.assertRaises(TypeError, scenario.create_and_list_ports)
        mock_create_network.reset_mock()

        # Defaults
        scenario.create_and_list_ports(ports_per_network=ports_per_network)
        mock_create_network.assert_called_once_with({})
        self.assertEqual(mock_create_port.mock_calls,
                         [mock.call(net, {})] * ports_per_network)
        mock_list.assert_called_once_with()

        mock_create_network.reset_mock()
        mock_create_port.reset_mock()
        mock_list.reset_mock()

        # Custom options
        scenario.create_and_list_ports(
            network_create_args={"name": "given-name"},
            port_create_args={"allocation_pools": []},
            ports_per_network=ports_per_network)
        mock_create_network.assert_called_once_with({"name": "given-name"})
        self.assertEqual(
            mock_create_port.mock_calls,
            [mock.call(net, {"allocation_pools": []})] * ports_per_network)
        mock_list.assert_called_once_with()

    @mock.patch(NEUTRON_NETWORKS + "._generate_random_name")
    @mock.patch(NEUTRON_NETWORKS + "._delete_port")
    @mock.patch(NEUTRON_NETWORKS + "._create_port")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    def test_create_and_delete_ports(self,
                                     mock_create_network,
                                     mock_create_port,
                                     mock_delete,
                                     mock_random_name):
        scenario = network.NeutronNetworks()
        mock_random_name.return_value = "random-name"
        net = {"network": {"id": "fake-id"}}
        mock_create_network.return_value = net
        ports_per_network = 10

        # ports_per_network has no default, so calling without it must fail
        self.assertRaises(TypeError, scenario.create_and_delete_ports)
        mock_create_network.reset_mock()

        # Default options
        scenario.create_and_delete_ports(ports_per_network=ports_per_network)
        mock_create_network.assert_called_once_with({})
        self.assertEqual(mock_create_port.mock_calls,
                         [mock.call(net, {})] * ports_per_network)
        mock_delete.assert_called_once()

        mock_create_network.reset_mock()
        mock_create_port.reset_mock()
        # Bug fix: the original called mock_delete.reset(), which is not the
        # Mock reset API (it merely records a call named `reset`). Only
        # reset_mock() actually clears the recorded calls before the next
        # scenario invocation.
        mock_delete.reset_mock()

        # Custom options
        scenario.create_and_delete_ports(
            network_create_args={"name": "given-name"},
            port_create_args={"allocation_pools": []},
            ports_per_network=ports_per_network)
        mock_create_network.assert_called_once_with({"name": "given-name"})
        self.assertEqual(
            mock_create_port.mock_calls,
            [mock.call(net, {"allocation_pools": []})] * ports_per_network)
        mock_delete.assert_called_once()
| 42.028391
| 78
| 0.632815
| 1,440
| 13,323
| 5.408333
| 0.095139
| 0.083462
| 0.063303
| 0.077042
| 0.862352
| 0.823832
| 0.790447
| 0.765152
| 0.737673
| 0.726374
| 0
| 0.001548
| 0.272912
| 13,323
| 316
| 79
| 42.161392
| 0.802416
| 0.063499
| 0
| 0.744856
| 0
| 0
| 0.090617
| 0.008114
| 0
| 0
| 0
| 0
| 0.205761
| 1
| 0.028807
| false
| 0
| 0.012346
| 0
| 0.045267
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
963e93006adf5421b1cde0808970a0935a5d8ce2
| 94
|
py
|
Python
|
nnet/nn/functional/__init__.py
|
trip2eee/nnet2
|
2061cdf3c8e2ac3f0bdb9e077baa94c67803e99f
|
[
"MIT"
] | null | null | null |
nnet/nn/functional/__init__.py
|
trip2eee/nnet2
|
2061cdf3c8e2ac3f0bdb9e077baa94c67803e99f
|
[
"MIT"
] | null | null | null |
nnet/nn/functional/__init__.py
|
trip2eee/nnet2
|
2061cdf3c8e2ac3f0bdb9e077baa94c67803e99f
|
[
"MIT"
] | null | null | null |
from nnet.nn.functional.functional import *
from nnet.nn.functional.functional_conv import *
| 23.5
| 48
| 0.819149
| 13
| 94
| 5.846154
| 0.461538
| 0.210526
| 0.263158
| 0.526316
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095745
| 94
| 3
| 49
| 31.333333
| 0.894118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.