hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0877d0018768589575605449c3b0d84a1606f42b
| 168
|
py
|
Python
|
JetMETAnalysis/METSkims/python/METSkims_OutputModules_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
JetMETAnalysis/METSkims/python/METSkims_OutputModules_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
JetMETAnalysis/METSkims/python/METSkims_OutputModules_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from JetMETAnalysis.METSkims.metHigh_OutputModule_cfi import *
from JetMETAnalysis.METSkims.metLow_OutputModule_cfi import *
| 28
| 62
| 0.869048
| 20
| 168
| 7.1
| 0.65
| 0.253521
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 168
| 5
| 63
| 33.6
| 0.922078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
410656b35160cc35402a8f98869b981467d5dc82
| 11,675
|
py
|
Python
|
scale/diagnostic/views.py
|
stevevarner/scale
|
9623b261db4ddcf770f00df16afc91176142bb7c
|
[
"Apache-2.0"
] | null | null | null |
scale/diagnostic/views.py
|
stevevarner/scale
|
9623b261db4ddcf770f00df16afc91176142bb7c
|
[
"Apache-2.0"
] | null | null | null |
scale/diagnostic/views.py
|
stevevarner/scale
|
9623b261db4ddcf770f00df16afc91176142bb7c
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
import logging
from django.http.response import Http404
import rest_framework.status as status
from rest_framework.generics import GenericAPIView
from rest_framework.parsers import JSONParser
from rest_framework.response import Response
from job.models import JobType
from queue.models import Queue
from queue.serializers import QueueStatusSerializer
from recipe.configuration.data.recipe_data import LegacyRecipeData
from recipe.models import RecipeType
import util.rest as rest_util
from util.rest import BadParameter
logger = logging.getLogger(__name__)
class QueueScaleBakeView(GenericAPIView):
"""This view is the endpoint for queuing new Scale Bake jobs."""
parser_classes = (JSONParser,)
queryset = Queue.objects.all()
serializer_class = QueueStatusSerializer
def post(self, request):
"""Determine api version and call specific method
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
if request.version == 'v4':
return self.post_v4(request)
elif request.version == 'v5':
return self.post_v5(request)
elif request.version == 'v6':
return self.post_v6(request)
raise Http404()
# TODO: remove when REST API v4 is removed
def post_v4(self, request):
"""Handles v4 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_bake_jobs(request)
def post_v5(self, request):
"""Handles v5 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_bake_jobs(request)
def post_v6(self, request):
"""Handles v6 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_bake_jobs(request)
def queue_bake_jobs(self, request):
"""Creates and queues the specified number of Scale Bake jobs
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
num = rest_util.parse_int(request, 'num')
if num < 1:
raise BadParameter('num must be at least 1')
# TODO: in the future, send command message to do this asynchronously
job_type = JobType.objects.get(name='scale-bake', version='1.0')
for _ in xrange(num):
Queue.objects.queue_new_job_for_user(job_type, {})
return Response(status=status.HTTP_202_ACCEPTED)
class QueueScaleCasinoView(GenericAPIView):
"""This view is the endpoint for queuing new Scale Casino recipes."""
parser_classes = (JSONParser,)
queryset = Queue.objects.all()
serializer_class = QueueStatusSerializer
def post(self, request):
"""Determine api version and call specific method
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
if request.version == 'v4':
return self.post_v4(request)
elif request.version == 'v5':
return self.post_v5(request)
elif request.version == 'v6':
return self.post_v6(request)
raise Http404()
# TODO: remove when REST API v4 is removed
def post_v4(self, request):
"""Handles v4 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_casino_recipes(request)
def post_v5(self, request):
"""Handles v5 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_casino_recipes(request)
def post_v6(self, request):
"""Handles v6 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_casino_recipes(request)
def queue_casino_recipes(self, request):
"""Creates and queues the specified number of Scale Casino recipes
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
num = rest_util.parse_int(request, 'num')
if num < 1:
raise BadParameter('num must be at least 1')
# TODO: in the future, send command message to do this asynchronously
recipe_type = RecipeType.objects.get(name='scale-casino', version='1.0')
for _ in xrange(num):
Queue.objects.queue_new_recipe_for_user(recipe_type, LegacyRecipeData())
return Response(status=status.HTTP_202_ACCEPTED)
class QueueScaleHelloView(GenericAPIView):
"""This view is the endpoint for queuing new Scale Hello jobs."""
parser_classes = (JSONParser,)
queryset = Queue.objects.all()
serializer_class = QueueStatusSerializer
def post(self, request):
"""Determine api version and call specific method
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
if request.version == 'v4':
return self.post_v4(request)
elif request.version == 'v5':
return self.post_v5(request)
elif request.version == 'v6':
return self.post_v6(request)
raise Http404()
# TODO: remove when REST API v4 is removed
def post_v4(self, request):
"""Handles v4 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_hello_jobs(request)
def post_v5(self, request):
"""Handles v5 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_hello_jobs(request)
def post_v6(self, request):
"""Handles v6 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_hello_jobs(request)
def queue_hello_jobs(self, request):
"""Creates and queues the specified number of Scale Hello jobs
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
num = rest_util.parse_int(request, 'num')
if num < 1:
raise BadParameter('num must be at least 1')
# TODO: in the future, send command message to do this asynchronously
job_type = JobType.objects.get(name='scale-hello', version='1.0')
for _ in xrange(num):
Queue.objects.queue_new_job_for_user(job_type, {})
return Response(status=status.HTTP_202_ACCEPTED)
class QueueScaleRouletteView(GenericAPIView):
"""This view is the endpoint for queuing new Scale Roulette jobs."""
parser_classes = (JSONParser,)
queryset = Queue.objects.all()
serializer_class = QueueStatusSerializer
def post(self, request):
"""Determine api version and call specific method
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
if request.version == 'v4':
return self.post_v4(request)
elif request.version == 'v5':
return self.post_v5(request)
elif request.version == 'v6':
return self.post_v6(request)
raise Http404()
# TODO: remove when REST API v4 is removed
def post_v4(self, request):
"""Handles v4 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_roulette_jobs(request)
def post_v5(self, request):
"""Handles v5 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_roulette_jobs(request)
def post_v6(self, request):
"""Handles v6 post request
:param request: the HTTP GET request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
return self.queue_roulette_jobs(request)
def queue_roulette_jobs(self, request):
"""Creates and queues the specified number of Scale Roulette jobs
:param request: the HTTP POST request
:type request: :class:`rest_framework.request.Request`
:rtype: :class:`rest_framework.response.Response`
:returns: the HTTP response to send back to the user
"""
num = rest_util.parse_int(request, 'num')
if num < 1:
raise BadParameter('num must be at least 1')
# TODO: in the future, send command message to do this asynchronously
job_type = JobType.objects.get(name='scale-roulette', version='1.0')
for _ in xrange(num):
Queue.objects.queue_new_job_for_user(job_type, {})
return Response(status=status.HTTP_202_ACCEPTED)
| 34.338235
| 84
| 0.650193
| 1,433
| 11,675
| 5.183531
| 0.079553
| 0.077006
| 0.096931
| 0.051158
| 0.892165
| 0.892165
| 0.892165
| 0.892165
| 0.885972
| 0.885972
| 0
| 0.011013
| 0.261156
| 11,675
| 339
| 85
| 34.439528
| 0.850104
| 0.456702
| 0
| 0.764706
| 0
| 0
| 0.03392
| 0
| 0
| 0
| 0
| 0.023599
| 0
| 1
| 0.168067
| false
| 0
| 0.117647
| 0
| 0.655462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
de1cfe8a8edd5cb8267d19e27e2bf0434619c0fe
| 3,448
|
py
|
Python
|
stars/test/test_weapon.py
|
therussellhome/inherit_the_stars
|
aa498103e1683625f762afee8c95ffcebb77ca03
|
[
"Unlicense"
] | null | null | null |
stars/test/test_weapon.py
|
therussellhome/inherit_the_stars
|
aa498103e1683625f762afee8c95ffcebb77ca03
|
[
"Unlicense"
] | null | null | null |
stars/test/test_weapon.py
|
therussellhome/inherit_the_stars
|
aa498103e1683625f762afee8c95ffcebb77ca03
|
[
"Unlicense"
] | null | null | null |
import unittest
from unittest.mock import patch
from .. import *
class WeaponTestCase(unittest.TestCase):
def test_accuracy1(self):
w = weapon.Weapon(range_tm=1)
self.assertEqual(w.get_accuracy(0), 100)
def test_accuracy2(self):
w = weapon.Weapon(range_tm=1)
self.assertEqual(w.get_accuracy(1), 0)
def test_accuracy3(self):
w = weapon.Weapon(range_tm=1, is_beam=False)
self.assertEqual(w.get_accuracy(0), 100)
def test_accuracy4(self):
w = weapon.Weapon(range_tm=1, is_beam=False)
self.assertEqual(w.get_accuracy(stars_math.TERAMETER_2_LIGHTYEAR), 0)
def test_accuracy5(self):
w = weapon.Weapon(range_tm=2, is_beam=False)
self.assertEqual(w.get_accuracy(stars_math.TERAMETER_2_LIGHTYEAR), 93.75)
def test_power1(self):
w = weapon.Weapon(power=100, range_tm=1)
self.assertEqual(w.get_power(0, 150, 0), (100, 0))
def test_power2(self):
w = weapon.Weapon(power=100, range_tm=1)
self.assertEqual(w.get_power(0, 50, 150), (50, 50))
def test_power3(self):
w = weapon.Weapon(power=100, range_tm=1)
self.assertEqual(w.get_power(0, 0, 150), (0, 100))
def test_power4(self):
w = weapon.Weapon(power=100, range_tm=1)
self.assertEqual(w.get_power(stars_math.TERAMETER_2_LIGHTYEAR, 0, 150), (0, 0))
def test_power5(self):
w = weapon.Weapon(power=100, range_tm=2)
self.assertEqual(w.get_power(stars_math.TERAMETER_2_LIGHTYEAR, 50, 150), (50, 0))
def test_power6(self):
w = weapon.Weapon(power=100, range_tm=1, armor_multiplier=2)
self.assertEqual(w.get_power(0, 50, 150), (50, 100))
def test_power7(self):
w = weapon.Weapon(power=100, range_tm=1, is_beam=False)
self.assertEqual(w.get_power(0, 50, 150), (50, 50))
def test_power8(self):
w = weapon.Weapon(power=100, range_tm=1, is_beam=False)
self.assertEqual(w.get_power(0, 150, 150), (75, 25))
def test_power9(self):
w = weapon.Weapon(power=100, range_tm=2, is_beam=False)
self.assertEqual(w.get_power(stars_math.TERAMETER_2_LIGHTYEAR, 50, 150), (50, 50))
def test_power10(self):
w = weapon.Weapon(power=100, range_tm=1, is_beam=False, armor_multiplier=2)
self.assertEqual(w.get_power(0, 150, 150), (75, 50))
def test_damage1(self):
w = weapon.Weapon(power=1, range_tm=1)
self.assertNotEqual(w.get_damage(0, 100, 100, 2000, 0), (0, 0))
def test_damage2(self):
w = weapon.Weapon(power=1, range_tm=1)
self.assertEqual(w.get_damage(1, 100, 100, 2000, 0), (0, 0))
def test_damage3(self):
with patch.object(weapon.Weapon, 'get_power', return_value=(1,1)) as mock:
w = weapon.Weapon(power=1, range_tm=2, is_beam=False)
for i in range(0, 100):
w.get_damage(stars_math.TERAMETER_2_LIGHTYEAR, 100, 100, 0, 0)
self.assertLess(mock.call_count, 100)
self.assertGreater(mock.call_count, 80)
def test_damage4(self):
with patch.object(weapon.Weapon, 'get_power', return_value=(1,1)) as mock:
w = weapon.Weapon(power=1, range_tm=2, is_beam=False)
for i in range(0, 100):
w.get_damage(stars_math.TERAMETER_2_LIGHTYEAR, 100, 100, 500, 100)
self.assertLess(mock.call_count, 25)
self.assertGreater(mock.call_count, 5)
| 38.311111
| 90
| 0.644142
| 530
| 3,448
| 4.007547
| 0.139623
| 0.118644
| 0.11629
| 0.136064
| 0.819209
| 0.765537
| 0.752354
| 0.743879
| 0.725047
| 0.622411
| 0
| 0.096283
| 0.219838
| 3,448
| 89
| 91
| 38.741573
| 0.693309
| 0
| 0
| 0.318841
| 0
| 0
| 0.00522
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 1
| 0.275362
| false
| 0
| 0.043478
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
de4f828ee3171071e770eb324066aa6ccabe11f2
| 3,361
|
py
|
Python
|
linear_separation.py
|
MKaczkow/naive_bayes_classifier
|
305a8e95a93960e7cbd70660513a9546d0aeab99
|
[
"MIT"
] | null | null | null |
linear_separation.py
|
MKaczkow/naive_bayes_classifier
|
305a8e95a93960e7cbd70660513a9546d0aeab99
|
[
"MIT"
] | null | null | null |
linear_separation.py
|
MKaczkow/naive_bayes_classifier
|
305a8e95a93960e7cbd70660513a9546d0aeab99
|
[
"MIT"
] | null | null | null |
"""
Author: Maciej Kaczkowski
15.04-29.04.2021
"""
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# reading clean dataset
main_df = pd.read_csv(r'seeds_dataset_clean.txt', header=None, sep='\t')
main_df.columns = ['area', 'perimeter', 'compactness', 'kernel length',
'kernel width', 'asymmetry coef.', 'groove length', 'class']
# printing all plots
fig, axes = plt.subplots(2, 2, figsize=(10, 10))
sns.scatterplot(ax=axes[0, 0], data=main_df, x='asymmetry coef.', y='compactness', hue='class', legend=True)
sns.scatterplot(ax=axes[0, 1], data=main_df, x='asymmetry coef.', y='groove length', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 0], data=main_df, x='asymmetry coef.', y='kernel length', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 1], data=main_df, x='asymmetry coef.', y='kernel width', hue='class', legend=True)
fig, axes = plt.subplots(2, 2, figsize=(10, 10))
sns.scatterplot(ax=axes[0, 0], data=main_df, x='asymmetry coef.', y='perimeter', hue='class', legend=True)
sns.scatterplot(ax=axes[0, 1], data=main_df, x='asymmetry coef.', y='area', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 0], data=main_df, x='compactness', y='groove length', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 1], data=main_df, x='compactness', y='kernel width', hue='class', legend=True)
fig, axes = plt.subplots(2, 2, figsize=(10, 10))
sns.scatterplot(ax=axes[0, 0], data=main_df, y='compactness', x='kernel length', hue='class', legend=True)
sns.scatterplot(ax=axes[0, 1], data=main_df, y='compactness', x='area', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 0], data=main_df, y='compactness', x='perimeter', hue='class', legend=True)
axes[1, 1].set_visible(False)
fig, axes = plt.subplots(2, 2, figsize=(10, 10))
sns.scatterplot(ax=axes[0, 0], data=main_df, x='groove length', y='kernel width', hue='class', legend=True)
sns.scatterplot(ax=axes[0, 1], data=main_df, x='groove length', y='kernel length', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 0], data=main_df, x='groove length', y='area', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 1], data=main_df, x='groove length', y='perimeter', hue='class', legend=True)
fig, axes = plt.subplots(2, 2, figsize=(10, 10))
sns.scatterplot(ax=axes[0, 0], data=main_df, x='kernel length', y='kernel width', hue='class', legend=True)
sns.scatterplot(ax=axes[0, 1], data=main_df, x='kernel length', y='area', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 0], data=main_df, x='kernel length', y='perimeter', hue='class', legend=True)
axes[1, 1].set_visible(False)
fig, axes = plt.subplots(2, 2, figsize=(10, 10))
sns.scatterplot(ax=axes[0, 0], data=main_df, x='kernel width', y='perimeter', hue='class', legend=True)
sns.scatterplot(ax=axes[0, 1], data=main_df, x='kernel width', y='area', hue='class', legend=True)
sns.scatterplot(ax=axes[1, 0], data=main_df, x='area', y='perimeter', hue='class', legend=True)
axes[1, 1].set_visible(False)
# printing only the most promising plots
plt.figure(7)
sns.scatterplot(data=main_df, x='groove length', y='kernel length', hue='class', legend=True)
plt.figure(8)
sns.scatterplot(data=main_df, x='perimeter', y='area', hue='class', legend=True)
plt.figure(9)
sns.scatterplot(data=main_df, x='groove length', y='area', hue='class', legend=True)
plt.show()
| 48.710145
| 110
| 0.693246
| 568
| 3,361
| 4.045775
| 0.126761
| 0.067885
| 0.104439
| 0.18799
| 0.85074
| 0.85074
| 0.818103
| 0.785901
| 0.761097
| 0.748912
| 0
| 0.032716
| 0.099673
| 3,361
| 68
| 111
| 49.426471
| 0.726702
| 0.036596
| 0
| 0.209302
| 0
| 0
| 0.230793
| 0.007125
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.069767
| 0
| 0.069767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a0d12951711123882680a98b1303d57e2ab1d030
| 165
|
py
|
Python
|
schainpy/model/graphics/__init__.py
|
LuisRondoCuevas/schainpy
|
ef41efe03993a6ae56e587334a1bfc529fccc2df
|
[
"BSD-3-Clause"
] | null | null | null |
schainpy/model/graphics/__init__.py
|
LuisRondoCuevas/schainpy
|
ef41efe03993a6ae56e587334a1bfc529fccc2df
|
[
"BSD-3-Clause"
] | null | null | null |
schainpy/model/graphics/__init__.py
|
LuisRondoCuevas/schainpy
|
ef41efe03993a6ae56e587334a1bfc529fccc2df
|
[
"BSD-3-Clause"
] | null | null | null |
from .jroplot_voltage import *
from .jroplot_spectra import *
from .jroplot_heispectra import *
from .jroplot_correlation import *
from .jroplot_parameters import *
| 27.5
| 34
| 0.818182
| 20
| 165
| 6.5
| 0.4
| 0.423077
| 0.523077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 165
| 5
| 35
| 33
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9d425e3002cd7b76d496cd1e52e939b0810877dc
| 2,022
|
py
|
Python
|
src/bot/tests/spacestation/test_spacestations.py
|
ItsCalebJones/SpaceLaunchNow_API
|
09289068465c462557649172792ab0f41f833028
|
[
"Apache-2.0"
] | 11
|
2017-06-26T05:01:31.000Z
|
2019-09-13T18:48:27.000Z
|
src/bot/tests/spacestation/test_spacestations.py
|
ItsCalebJones/SpaceLaunchNow_API
|
09289068465c462557649172792ab0f41f833028
|
[
"Apache-2.0"
] | 14
|
2019-01-30T23:13:34.000Z
|
2019-10-08T10:43:36.000Z
|
src/bot/tests/spacestation/test_spacestations.py
|
ItsCalebJones/SpaceLaunchNow_API
|
09289068465c462557649172792ab0f41f833028
|
[
"Apache-2.0"
] | 5
|
2018-04-24T16:52:59.000Z
|
2018-08-22T14:06:01.000Z
|
import json
import unittest
from rest_framework import status
from api.models import SpaceStation
from api.tests.test__base import LLAPITests
from spacelaunchnow import settings
class SpaceStationTest(LLAPITests):
@unittest.skipIf(settings.IS_LL, "Not supported in this configuration.")
def test_v330_spacestations(self):
"""
Ensure spacestation endpoints work as expected.
"""
path = '/api/3.3.0/spacestation/'
response = self.client.get(path)
self.assertEqual(response.status_code, status.HTTP_200_OK)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data['count'], 1)
iss = SpaceStation.objects.get(name=data['results'][0]['name'])
self.assertEqual(data['results'][0]['name'], iss.name)
self.assertEqual(data['results'][0]['founded'], "1998-11-20")
self.assertEqual(data['results'][0]['description'], iss.description)
self.assertEqual(data['results'][0]['orbit'], iss.orbit.name)
@unittest.skipIf(settings.IS_LL, "Not supported in this configuration.")
def test_v330_spacestations_detailed(self):
"""
Ensure spacestation endpoints work as expected.
"""
path = '/api/3.3.0/spacestation/?mode=detailed'
response = self.client.get(path)
self.assertEqual(response.status_code, status.HTTP_200_OK)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data['count'], 1)
iss = SpaceStation.objects.get(name=data['results'][0]['name'])
self.assertEqual(data['results'][0]['name'], iss.name)
self.assertEqual(data['results'][0]['founded'], "1998-11-20")
self.assertEqual(data['results'][0]['description'], iss.description)
self.assertEqual(data['results'][0]['orbit'], iss.orbit.name)
self.assertIn('owners', data['results'][0])
self.assertIn('docked_vehicles', data['results'][0])
self.assertIn('active_expeditions', data['results'][0])
| 43.021277
| 76
| 0.66271
| 245
| 2,022
| 5.395918
| 0.293878
| 0.108169
| 0.118003
| 0.157337
| 0.800303
| 0.763994
| 0.763994
| 0.763994
| 0.763994
| 0.763994
| 0
| 0.030723
| 0.179031
| 2,022
| 46
| 77
| 43.956522
| 0.765663
| 0.046983
| 0
| 0.588235
| 0
| 0
| 0.194681
| 0.032979
| 0
| 0
| 0
| 0
| 0.441176
| 1
| 0.058824
| false
| 0
| 0.176471
| 0
| 0.264706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f22892f33369f6af65abe8c5822c91693184090
| 19
|
py
|
Python
|
untitled1/sunk/sunk/__init__.py
|
wuFANL/Python_1812_1
|
5bd34b00e1ba9e5381263852af165c9430556795
|
[
"MIT"
] | null | null | null |
untitled1/sunk/sunk/__init__.py
|
wuFANL/Python_1812_1
|
5bd34b00e1ba9e5381263852af165c9430556795
|
[
"MIT"
] | null | null | null |
untitled1/sunk/sunk/__init__.py
|
wuFANL/Python_1812_1
|
5bd34b00e1ba9e5381263852af165c9430556795
|
[
"MIT"
] | null | null | null |
import pymysql
| 3.166667
| 14
| 0.684211
| 2
| 19
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 19
| 5
| 15
| 3.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
264393c5102903efaa9e194811e2808443a76166
| 242
|
py
|
Python
|
mlapi/model/question_result.py
|
CoveoWhisper/MLAPI
|
9c616ff173e8083f581d73c7130249ab70c0f783
|
[
"MIT"
] | null | null | null |
mlapi/model/question_result.py
|
CoveoWhisper/MLAPI
|
9c616ff173e8083f581d73c7130249ab70c0f783
|
[
"MIT"
] | 3
|
2018-10-20T23:10:14.000Z
|
2018-12-15T00:44:49.000Z
|
mlapi/model/question_result.py
|
CoveoWhisper/MLAPI
|
9c616ff173e8083f581d73c7130249ab70c0f783
|
[
"MIT"
] | 1
|
2018-11-12T14:38:09.000Z
|
2018-11-12T14:38:09.000Z
|
class QuestionResult(object):
def __init__(self, question, score):
self.question = question
self.score = score
def to_dict(self):
return {"facetQuestion": self.question,
"score": self.score}
| 22
| 47
| 0.603306
| 25
| 242
| 5.64
| 0.48
| 0.255319
| 0.241135
| 0.297872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289256
| 242
| 10
| 48
| 24.2
| 0.819767
| 0
| 0
| 0
| 0
| 0
| 0.074689
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
2650ee362a6c5fde9e100d2f94369f3fb512db8a
| 384
|
py
|
Python
|
test_reversed_sentence.py
|
akanksha234/Python-Interview-Questions
|
431913628fbef0c9e503cf1915136fae21e8b023
|
[
"MIT"
] | null | null | null |
test_reversed_sentence.py
|
akanksha234/Python-Interview-Questions
|
431913628fbef0c9e503cf1915136fae21e8b023
|
[
"MIT"
] | null | null | null |
test_reversed_sentence.py
|
akanksha234/Python-Interview-Questions
|
431913628fbef0c9e503cf1915136fae21e8b023
|
[
"MIT"
] | null | null | null |
from sentence_reversal import reverse_sentence
def test_1():
    # Leading whitespace must be ignored when reversing word order.
    assert reverse_sentence(' space before') == 'before space'
def test_2():
    # Trailing whitespace must be ignored when reversing word order.
    assert reverse_sentence('space after ') == 'after space'
def test_3():
    # Multi-word sentence with surrounding whitespace.
    assert reverse_sentence(' Hello John how are you ') == 'you are how John Hello'
def test_4():
    # Single-word input is its own reversal.
    assert reverse_sentence('1') == '1'
| 25.6
| 91
| 0.643229
| 50
| 384
| 4.74
| 0.4
| 0.316456
| 0.35443
| 0.219409
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020067
| 0.221354
| 384
| 14
| 92
| 27.428571
| 0.772575
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.444444
| true
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
cd24b10ed437d844a822c79d3b79774588647e1b
| 389
|
py
|
Python
|
dataschema/__init__.py
|
ashafer01/python-dataschema
|
c732f8ec1550b7148e72c637c090b267694ac6bb
|
[
"MIT"
] | null | null | null |
dataschema/__init__.py
|
ashafer01/python-dataschema
|
c732f8ec1550b7148e72c637c090b267694ac6bb
|
[
"MIT"
] | null | null | null |
dataschema/__init__.py
|
ashafer01/python-dataschema
|
c732f8ec1550b7148e72c637c090b267694ac6bb
|
[
"MIT"
] | null | null | null |
from .base import Spec
from .specs import (
Type,
CType,
EnumSpec,
TypeSpec,
IterSpec,
SeqSpec,
DictSpec,
ConditionalDictSpec,
Post,
Update,
Test,
)
# Public API of the dataschema package: keep this tuple in sync with the
# names imported from .base and .specs above (it controls
# `from dataschema import *` and documents the supported surface).
__all__ = (
    'Spec',
    'Type',
    'CType',
    'EnumSpec',
    'TypeSpec',
    'IterSpec',
    'SeqSpec',
    'DictSpec',
    'ConditionalDictSpec',
    'Post',
    'Update',
    'Test',
)
| 12.966667
| 26
| 0.529563
| 31
| 389
| 6.516129
| 0.548387
| 0.089109
| 0.168317
| 0.247525
| 0.80198
| 0.80198
| 0.80198
| 0.80198
| 0.80198
| 0.80198
| 0
| 0
| 0.33162
| 389
| 29
| 27
| 13.413793
| 0.776923
| 0
| 0
| 0
| 0
| 0
| 0.218509
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd531049a286124beb10550163f779a7cb3843da
| 1,673
|
py
|
Python
|
tests/test_postprocessor_configuration.py
|
Jomcgi/scrubadub
|
99ec3bf7027cd6c2a7c6b8f0dd8c7bd9fc12ee7d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_postprocessor_configuration.py
|
Jomcgi/scrubadub
|
99ec3bf7027cd6c2a7c6b8f0dd8c7bd9fc12ee7d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_postprocessor_configuration.py
|
Jomcgi/scrubadub
|
99ec3bf7027cd6c2a7c6b8f0dd8c7bd9fc12ee7d
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import scrubadub
class PostProcessorConfigTestCase(unittest.TestCase):
    """Tests registration and removal of post-processors in scrubadub's
    global post-processor configuration."""
    def test_register_detector(self):
        """register_post_processor must record the class, autoload flag and
        index under the post-processor's name, for any flag/index values."""
        class NewPostProcessor(scrubadub.post_processors.PostProcessor):
            name = 'new_post_processor'
        config = scrubadub.post_processors.post_processor_configuration
        # Exercise both flag/index combinations the original test covered.
        for autoload, index in ((False, -1), (True, 0)):
            scrubadub.post_processors.register_post_processor(NewPostProcessor, autoload, index)
            # assertIn/assertNotIn give clearer failure messages than
            # assertTrue(x in y).
            self.assertIn(NewPostProcessor.name, config)
            entry = config[NewPostProcessor.name]
            self.assertEqual(entry['autoload'], autoload)
            self.assertEqual(entry['index'], index)
            self.assertEqual(entry['post_processor'], NewPostProcessor)
            # Remove the entry so the global configuration is left unchanged.
            config.pop(NewPostProcessor.name)
            self.assertNotIn(NewPostProcessor.name, config)
| 64.346154
| 139
| 0.812313
| 167
| 1,673
| 7.862275
| 0.179641
| 0.158416
| 0.24524
| 0.2262
| 0.817212
| 0.817212
| 0.817212
| 0.686976
| 0.686976
| 0.686976
| 0
| 0.002674
| 0.105798
| 1,673
| 25
| 140
| 66.92
| 0.875
| 0
| 0
| 0.315789
| 0
| 0
| 0.043036
| 0
| 0
| 0
| 0
| 0
| 0.473684
| 1
| 0.052632
| false
| 0
| 0.105263
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd62fc2ca6c9fd7c2e3c4e8a04bc3abaa986bd35
| 10,419
|
py
|
Python
|
test/test_repositories_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
test/test_repositories_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
test/test_repositories_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import unittest
from gitee.api.repositories_api import RepositoriesApi # noqa: E501
class TestRepositoriesApi(unittest.TestCase):
    """RepositoriesApi unit test stubs"""
    # Auto-generated stubs: each case only names the endpoint it should cover;
    # bodies are intentionally `pass` until real assertions are written.
    def setUp(self):
        self.api = RepositoriesApi()  # noqa: E501
    def tearDown(self):
        pass
    def test_delete_v5_repos_owner_repo(self):
        """Test case for delete_v5_repos_owner_repo
        Delete a repository  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_branches_branch_protection(self):
        """Test case for delete_v5_repos_owner_repo_branches_branch_protection
        Remove protection settings from a branch  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_collaborators_username(self):
        """Test case for delete_v5_repos_owner_repo_collaborators_username
        Remove a repository member  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_comments_id(self):
        """Test case for delete_v5_repos_owner_repo_comments_id
        Delete a commit comment  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_contents_path(self):
        """Test case for delete_v5_repos_owner_repo_contents_path
        Delete a file  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_keys_enable_id(self):
        """Test case for delete_v5_repos_owner_repo_keys_enable_id
        Disable a repository deploy key  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_keys_id(self):
        """Test case for delete_v5_repos_owner_repo_keys_id
        Delete a repository deploy key  # noqa: E501
        """
        pass
    def test_delete_v5_repos_owner_repo_releases_id(self):
        """Test case for delete_v5_repos_owner_repo_releases_id
        Delete a repository release  # noqa: E501
        """
        pass
    def test_get_v5_enterprises_enterprise_repos(self):
        """Test case for get_v5_enterprises_enterprise_repos
        List all repositories of an enterprise  # noqa: E501
        """
        pass
    def test_get_v5_orgs_org_repos(self):
        """Test case for get_v5_orgs_org_repos
        List repositories of an organization  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo(self):
        """Test case for get_v5_repos_owner_repo
        Get a specific repository of a user  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_branches(self):
        """Test case for get_v5_repos_owner_repo_branches
        List all branches  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_branches_branch(self):
        """Test case for get_v5_repos_owner_repo_branches_branch
        Get a single branch  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_collaborators(self):
        """Test case for get_v5_repos_owner_repo_collaborators
        List all members of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_collaborators_username(self):
        """Test case for get_v5_repos_owner_repo_collaborators_username
        Check whether a user is a repository member  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_collaborators_username_permission(self):
        """Test case for get_v5_repos_owner_repo_collaborators_username_permission
        View the permission of a repository member  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_comments(self):
        """Test case for get_v5_repos_owner_repo_comments
        List commit comments of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_comments_id(self):
        """Test case for get_v5_repos_owner_repo_comments_id
        Get a specific commit comment of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_commits(self):
        """Test case for get_v5_repos_owner_repo_commits
        List all commits of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_commits_ref_comments(self):
        """Test case for get_v5_repos_owner_repo_commits_ref_comments
        List comments of a single commit  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_commits_sha(self):
        """Test case for get_v5_repos_owner_repo_commits_sha
        Get a single commit of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_compare_base___head(self):
        """Test case for get_v5_repos_owner_repo_compare_base___head
        Diff between two commits  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_contents_path(self):
        """Test case for get_v5_repos_owner_repo_contents_path
        Get repository contents under a specific path  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_contributors(self):
        """Test case for get_v5_repos_owner_repo_contributors
        List repository contributors  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_forks(self):
        """Test case for get_v5_repos_owner_repo_forks
        List forks of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_keys(self):
        """Test case for get_v5_repos_owner_repo_keys
        List deployed public keys of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_keys_available(self):
        """Test case for get_v5_repos_owner_repo_keys_available
        List deployable public keys of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_keys_id(self):
        """Test case for get_v5_repos_owner_repo_keys_id
        Get a single public key of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_pages(self):
        """Test case for get_v5_repos_owner_repo_pages
        Get Pages information  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_readme(self):
        """Test case for get_v5_repos_owner_repo_readme
        Get the repository README  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_releases(self):
        """Test case for get_v5_repos_owner_repo_releases
        List all releases of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_releases_id(self):
        """Test case for get_v5_repos_owner_repo_releases_id
        Get a single release of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_releases_latest(self):
        """Test case for get_v5_repos_owner_repo_releases_latest
        Get the most recently updated release of a repository  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_releases_tags_tag(self):
        """Test case for get_v5_repos_owner_repo_releases_tags_tag
        Get a repository release by tag name  # noqa: E501
        """
        pass
    def test_get_v5_repos_owner_repo_tags(self):
        """Test case for get_v5_repos_owner_repo_tags
        List all tags of a repository  # noqa: E501
        """
        pass
    def test_get_v5_user_repos(self):
        """Test case for get_v5_user_repos
        List all repositories of the authorized user  # noqa: E501
        """
        pass
    def test_get_v5_users_username_repos(self):
        """Test case for get_v5_users_username_repos
        List public repositories of a specific user  # noqa: E501
        """
        pass
    def test_patch_v5_repos_owner_repo(self):
        """Test case for patch_v5_repos_owner_repo
        Update repository settings  # noqa: E501
        """
        pass
    def test_patch_v5_repos_owner_repo_comments_id(self):
        """Test case for patch_v5_repos_owner_repo_comments_id
        Update a commit comment  # noqa: E501
        """
        pass
    def test_patch_v5_repos_owner_repo_releases_id(self):
        """Test case for patch_v5_repos_owner_repo_releases_id
        Update a repository release  # noqa: E501
        """
        pass
    def test_post_v5_enterprises_enterprise_repos(self):
        """Test case for post_v5_enterprises_enterprise_repos
        Create an enterprise repository  # noqa: E501
        """
        pass
    def test_post_v5_orgs_org_repos(self):
        """Test case for post_v5_orgs_org_repos
        Create an organization repository  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_branches(self):
        """Test case for post_v5_repos_owner_repo_branches
        Create a branch  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_commits_sha_comments(self):
        """Test case for post_v5_repos_owner_repo_commits_sha_comments
        Create a commit comment  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_contents_path(self):
        """Test case for post_v5_repos_owner_repo_contents_path
        Create a new file  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_forks(self):
        """Test case for post_v5_repos_owner_repo_forks
        Fork a repository  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_keys(self):
        """Test case for post_v5_repos_owner_repo_keys
        Add a public key to a repository  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_pages_builds(self):
        """Test case for post_v5_repos_owner_repo_pages_builds
        Request a Pages build  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_releases(self):
        """Test case for post_v5_repos_owner_repo_releases
        Create a repository release  # noqa: E501
        """
        pass
    def test_post_v5_repos_owner_repo_tags(self):
        """Test case for post_v5_repos_owner_repo_tags
        Create a tag for a repository  # noqa: E501
        """
        pass
    def test_post_v5_user_repos(self):
        """Test case for post_v5_user_repos
        Create a repository  # noqa: E501
        """
        pass
    def test_put_v5_repos_owner_repo_branches_branch_protection(self):
        """Test case for put_v5_repos_owner_repo_branches_branch_protection
        Set branch protection  # noqa: E501
        """
        pass
    def test_put_v5_repos_owner_repo_clear(self):
        """Test case for put_v5_repos_owner_repo_clear
        Clear a repository  # noqa: E501
        """
        pass
    def test_put_v5_repos_owner_repo_collaborators_username(self):
        """Test case for put_v5_repos_owner_repo_collaborators_username
        Add a repository member  # noqa: E501
        """
        pass
    def test_put_v5_repos_owner_repo_contents_path(self):
        """Test case for put_v5_repos_owner_repo_contents_path
        Update a file  # noqa: E501
        """
        pass
    def test_put_v5_repos_owner_repo_keys_enable_id(self):
        """Test case for put_v5_repos_owner_repo_keys_enable_id
        Enable a repository deploy key  # noqa: E501
        """
        pass
    def test_put_v5_repos_owner_repo_reviewer(self):
        """Test case for put_v5_repos_owner_repo_reviewer
        Modify code review settings  # noqa: E501
        """
        pass
# Allow running the stub suite directly: python test_repositories_api.py
if __name__ == '__main__':
    unittest.main()
| 24.689573
| 82
| 0.649295
| 1,351
| 10,419
| 4.491488
| 0.097705
| 0.115359
| 0.197759
| 0.263678
| 0.866842
| 0.849209
| 0.831081
| 0.752472
| 0.693309
| 0.554384
| 0
| 0.039305
| 0.286976
| 10,419
| 421
| 83
| 24.748219
| 0.777494
| 0.416931
| 0
| 0.467742
| 0
| 0
| 0.001662
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.475806
| false
| 0.467742
| 0.024194
| 0
| 0.508065
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
f80375e21de84baf2b1082bf6226e7c19b36a8f8
| 5,060
|
py
|
Python
|
sacorg/tests/test_simple.py
|
abdcelikkanat/sacorg
|
8501b0825ef6bbc705a2c34d3aa8799265f4ecc7
|
[
"Apache-2.0"
] | null | null | null |
sacorg/tests/test_simple.py
|
abdcelikkanat/sacorg
|
8501b0825ef6bbc705a2c34d3aa8799265f4ecc7
|
[
"Apache-2.0"
] | null | null | null |
sacorg/tests/test_simple.py
|
abdcelikkanat/sacorg
|
8501b0825ef6bbc705a2c34d3aa8799265f4ecc7
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from sacorg.algorithms import simple
from sacorg.utils import *
class Test(unittest.TestCase):
    """Unit tests for simple.count, which counts (method "C") or estimates
    (method "BD", Blitzstein-Diaconis sampling) the number of simple labeled
    graphs realizing a degree sequence.

    Reference counts for regular graphs come from the OEIS (www.oeis.org).
    """
    # A001205: number of undirected 2-regular labeled graphs on n nodes.
    OEIS_A001205 = [1, 0, 0, 1, 3, 12, 70, 465, 3507, 30016, 286884, 3026655, 34944085, 438263364, 5933502822,
                    86248951243, 1339751921865, 22148051088480, 388246725873208, 7193423109763089,
                    140462355821628771, 2883013994348484940]
    # A002829: number of trivalent (or cubic) labeled graphs with 2n nodes.
    OEIS_A002829 = [1, 0, 1, 70, 19355, 11180820, 11555272575, 19506631814670, 50262958713792825,
                    187747837889699887800, 976273961160363172131825, 6840300875426184026353242750,
                    62870315446244013091262178375075, 741227949070136911068308523257857500]
    # A005815: number of 4-valent labeled graphs with n nodes.
    OEIS_A005815 = [1, 0, 0, 0, 0, 1, 15, 465, 19355, 1024380, 66462606, 5188453830, 480413921130,
                    52113376310985, 6551246596501035, 945313907253606891, 155243722248524067795,
                    28797220460586826422720]
    def test_myalg_2regular(self):
        """Exact counts for 2-regular graphs (OEIS A001205)."""
        for n, expected in enumerate(self.OEIS_A001205):
            # dtype=int: `np.int` was only an alias for the builtin int and
            # was removed in NumPy 1.24.
            seq = np.ones(n, dtype=int) * 2
            computed_result = simple.count(deg_seq=seq, method="C", verbose=False)
            self.assertEqual(expected, computed_result)
    def test_myalg_3regular(self):
        """Exact counts for 3-regular (cubic) graphs (OEIS A002829)."""
        for n, expected in enumerate(self.OEIS_A002829):
            seq = np.ones(2 * n, dtype=int) * 3
            computed_result = simple.count(deg_seq=seq, method="C", verbose=False)
            self.assertEqual(expected, computed_result)
    def test_myalg_4regular(self):
        """Exact counts for 4-regular graphs (OEIS A005815)."""
        for n, expected in enumerate(self.OEIS_A005815):
            seq = np.ones(n, dtype=int) * 4
            computed_result = simple.count(deg_seq=seq, method="C", verbose=False)
            self.assertEqual(expected, computed_result)
    def test_myalg_arbitrary1(self):
        """Exact count for an arbitrary degree sequence."""
        seq = np.asarray([2, 2, 2])
        result = simple.count(deg_seq=seq, method="C", verbose=False)
        self.assertEqual(result, 1)
    def test_myalg_arbitrary2(self):
        """Exact count for an arbitrary degree sequence."""
        seq = np.asarray([2, 2, 3, 1, 7, 2, 5, 3, 5])
        result = simple.count(deg_seq=seq, method="C", verbose=False)
        self.assertEqual(result, 215)
    def test_myalg_arbitrary3(self):
        """Exact count for an arbitrary degree sequence."""
        seq = np.asarray([4, 6, 6, 5, 2, 1, 3, 8, 4, 1, 4])
        result = simple.count(deg_seq=seq, method="C", verbose=False)
        self.assertEqual(result, 117697)
    def test_myalg_arbitrary4(self):
        """Exact count for an arbitrary degree sequence."""
        seq = np.asarray([4, 2, 5, 2, 2, 3])
        result = simple.count(deg_seq=seq, method="C", verbose=False)
        self.assertEqual(result, 3)
    def test_myalg_arbitrary5(self):
        """Exact count for an arbitrary degree sequence."""
        seq = np.asarray([2, 4, 2, 4, 5, 4, 1])
        result = simple.count(deg_seq=seq, method="C", verbose=False)
        self.assertEqual(result, 12)
    def test_blitzstein_diaconis_2regular(self):
        """Blitzstein-Diaconis estimates for 2-regular graphs; prints estimate
        vs. the exact OEIS value for manual inspection (no assertion)."""
        for n, expected in enumerate(self.OEIS_A001205):
            seq = np.ones(n, dtype=int) * 2
            estimate, std = simple.count(deg_seq=seq, num_of_samples=1000, method="BD", verbose=False)
            # print(...) call form: identical output on Python 2, and fixes the
            # Python 3 SyntaxError of the original `print "..."` statement.
            print("Actual : " + str(expected) + " Estimate : " + str(estimate) + " Std : " + str(std))
    def test_blitzstein_diaconis_3regular(self):
        """Blitzstein-Diaconis estimates for 3-regular graphs; prints estimate
        vs. the exact OEIS value for manual inspection (no assertion)."""
        for n, expected in enumerate(self.OEIS_A002829):
            seq = np.ones(2 * n, dtype=int) * 3
            estimate, std = simple.count(deg_seq=seq, num_of_samples=1000, method="BD", verbose=False)
            print("Actual : " + str(expected) + " Estimate : " + str(estimate) + " Std : " + str(std))
| 45.178571
| 116
| 0.629051
| 601
| 5,060
| 5.18802
| 0.212978
| 0.06254
| 0.044901
| 0.054522
| 0.837075
| 0.816549
| 0.816549
| 0.816549
| 0.816549
| 0.778063
| 0
| 0.266525
| 0.258498
| 5,060
| 112
| 117
| 45.178571
| 0.564499
| 0.071937
| 0
| 0.545455
| 0
| 0
| 0.017691
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 0
| null | null | 0
| 0.045455
| null | null | 0.030303
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3e10564ea194ae8f3160044229bd9014dc0e2163
| 7,297
|
py
|
Python
|
anvil/async_test.py
|
suraj-testing2/Crab_Floppy_Disk
|
01f6f89426308904cc5eaf9f7ab64038f84ea191
|
[
"ECL-2.0",
"Apache-2.0"
] | 27
|
2015-02-18T22:38:05.000Z
|
2021-05-09T15:17:05.000Z
|
anvil/async_test.py
|
suraj-testing2/Crab_Floppy_Disk
|
01f6f89426308904cc5eaf9f7ab64038f84ea191
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2015-05-17T22:42:13.000Z
|
2017-05-01T18:31:48.000Z
|
anvil/async_test.py
|
suraj-testing2/Crab_Floppy_Disk
|
01f6f89426308904cc5eaf9f7ab64038f84ea191
|
[
"ECL-2.0",
"Apache-2.0"
] | 16
|
2015-05-06T20:28:06.000Z
|
2021-10-13T22:27:21.000Z
|
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Tests for the async module.
"""
__author__ = 'benvanik@google.com (Ben Vanik)'
import unittest2
from anvil.async import Deferred, gather_deferreds
from anvil.test import AsyncTestCase
class DeferredTest(unittest2.TestCase):
    """Behavioral tests of the Deferred type."""
    def testMultiCall(self):
        # A Deferred may fire exactly once: a second callback()/errback(),
        # in any combination, must raise AssertionError.
        d = Deferred()
        d.callback()
        with self.assertRaises(AssertionError):
            d.callback()
        d = Deferred()
        d.errback()
        with self.assertRaises(AssertionError):
            d.errback()
        d = Deferred()
        d.callback()
        with self.assertRaises(AssertionError):
            d.errback()
        d = Deferred()
        d.errback()
        with self.assertRaises(AssertionError):
            d.callback()
    def testCallbackArgs(self):
        # Scratch dict recording what cb_thunk received on its last call.
        cb = {}
        def cb_thunk(*args, **kwargs):
            cb['done'] = True
            cb['args'] = args
            cb['kwargs'] = kwargs
        # is_done() flips to True after callback()...
        d = Deferred()
        self.assertFalse(d.is_done())
        d.callback()
        self.assertTrue(d.is_done())
        # ...and likewise after errback().
        d = Deferred()
        self.assertFalse(d.is_done())
        d.errback()
        self.assertTrue(d.is_done())
        # No-arg callback: thunk runs with empty args/kwargs.
        d = Deferred()
        d.add_callback_fn(cb_thunk)
        d.callback()
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 0)
        self.assertEqual(len(cb['kwargs']), 0)
        cb.clear()
        # Positional args are passed through in order.
        d = Deferred()
        d.add_callback_fn(cb_thunk)
        d.callback('a', 'b')
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 2)
        self.assertEqual(cb['args'][0], 'a')
        self.assertEqual(cb['args'][1], 'b')
        self.assertEqual(len(cb['kwargs']), 0)
        cb.clear()
        # Keyword args are passed through as well.
        d = Deferred()
        d.add_callback_fn(cb_thunk)
        d.callback('a', b='b')
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 1)
        self.assertEqual(cb['args'][0], 'a')
        self.assertEqual(len(cb['kwargs']), 1)
        self.assertEqual(cb['kwargs']['b'], 'b')
        cb.clear()
    def testCallbackOrder(self):
        cb = {}
        def cb_thunk(*args, **kwargs):
            cb['done'] = True
            cb['args'] = args
            cb['kwargs'] = kwargs
        # Callback registered before firing runs on callback().
        d = Deferred()
        d.add_callback_fn(cb_thunk)
        d.callback('a')
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 1)
        self.assertEqual(cb['args'][0], 'a')
        self.assertEqual(len(cb['kwargs']), 0)
        cb.clear()
        # Callback registered after firing runs immediately with stored args.
        d = Deferred()
        d.callback('a')
        d.add_callback_fn(cb_thunk)
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 1)
        self.assertEqual(cb['args'][0], 'a')
        self.assertEqual(len(cb['kwargs']), 0)
        cb.clear()
        # Same two registration orders for errbacks.
        d = Deferred()
        d.add_errback_fn(cb_thunk)
        d.errback('a')
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 1)
        self.assertEqual(cb['args'][0], 'a')
        self.assertEqual(len(cb['kwargs']), 0)
        cb.clear()
        d = Deferred()
        d.errback('a')
        d.add_errback_fn(cb_thunk)
        self.assertNotEqual(len(cb), 0)
        self.assertTrue(cb['done'])
        self.assertEqual(len(cb['args']), 1)
        self.assertEqual(cb['args'][0], 'a')
        self.assertEqual(len(cb['kwargs']), 0)
        cb.clear()
        # Mismatched kind never fires: a callback fn must not run when the
        # deferred errbacks...
        d = Deferred()
        d.add_callback_fn(cb_thunk)
        d.errback('a')
        self.assertEqual(len(cb), 0)
        cb.clear()
        d = Deferred()
        d.errback('a')
        d.add_callback_fn(cb_thunk)
        self.assertEqual(len(cb), 0)
        cb.clear()
        # ...and an errback fn must not run when it calls back, in either
        # registration order.
        d = Deferred()
        d.add_errback_fn(cb_thunk)
        d.callback('a')
        self.assertEqual(len(cb), 0)
        cb.clear()
        d = Deferred()
        d.callback('a')
        d.add_errback_fn(cb_thunk)
        self.assertEqual(len(cb), 0)
        cb.clear()
    def testMultiCallbacks(self):
        # Each invocation of cb_multi_thunk appends one record, so len(cbs)
        # counts how many callback executions occurred.
        cbs = []
        def cb_multi_thunk(*args, **kwargs):
            cbs.append({
                'done': True,
                'args': args,
                'kwargs': kwargs
            })
        # Single callback registered before firing.
        d = Deferred()
        d.add_callback_fn(cb_multi_thunk)
        d.callback('a')
        self.assertEqual(len(cbs), 1)
        self.assertNotEqual(len(cbs[0]), 0)
        self.assertEqual(cbs[0]['args'][0], 'a')
        cbs[:] = []
        # Two callbacks registered before firing both run.
        d = Deferred()
        d.add_callback_fn(cb_multi_thunk)
        d.add_callback_fn(cb_multi_thunk)
        d.callback('a')
        self.assertEqual(len(cbs), 2)
        self.assertNotEqual(len(cbs[0]), 0)
        self.assertNotEqual(len(cbs[1]), 0)
        self.assertEqual(cbs[0]['args'][0], 'a')
        self.assertEqual(cbs[1]['args'][0], 'a')
        cbs[:] = []
        # One registered before and one after firing: both run with same args.
        d = Deferred()
        d.add_callback_fn(cb_multi_thunk)
        d.callback('a')
        d.add_callback_fn(cb_multi_thunk)
        self.assertEqual(len(cbs), 2)
        self.assertNotEqual(len(cbs[0]), 0)
        self.assertNotEqual(len(cbs[1]), 0)
        self.assertEqual(cbs[0]['args'][0], 'a')
        self.assertEqual(cbs[1]['args'][0], 'a')
        cbs[:] = []
        # Both registered after firing.
        d = Deferred()
        d.callback('a')
        d.add_callback_fn(cb_multi_thunk)
        d.add_callback_fn(cb_multi_thunk)
        self.assertEqual(len(cbs), 2)
        self.assertNotEqual(len(cbs[0]), 0)
        self.assertNotEqual(len(cbs[1]), 0)
        self.assertEqual(cbs[0]['args'][0], 'a')
        self.assertEqual(cbs[1]['args'][0], 'a')
        cbs[:] = []
class GatherTest(AsyncTestCase):
    """Behavioral tests for the async gather function."""
    def testGather(self):
        # Gathering nothing completes immediately with an empty list.
        d = gather_deferreds([])
        self.assertCallbackEqual(d, [])
        # Results are reported as (succeeded, args, kwargs) tuples in the
        # order the deferreds were passed, not the order they fired.
        da = Deferred()
        db = Deferred()
        dc = Deferred()
        df = Deferred()
        d = gather_deferreds([da, db, dc, df])
        df.errback()
        dc.callback('c')
        db.callback('b')
        da.callback('a')
        self.assertCallbackEqual(d, [
            (True, ('a',), {}),
            (True, ('b',), {}),
            (True, ('c',), {}),
            (False, (), {})])
        # Deferreds that already fired before gathering are handled the same.
        da = Deferred()
        db = Deferred()
        dc = Deferred()
        df = Deferred()
        df.errback('f')
        dc.callback('c')
        d = gather_deferreds([da, db, dc, df])
        db.callback('b')
        da.callback('a')
        self.assertCallbackEqual(d, [
            (True, ('a',), {}),
            (True, ('b',), {}),
            (True, ('c',), {}),
            (False, ('f',), {})])
    def testErrback(self):
        # With errback_if_any_fail=True an all-success gather still calls back.
        d = gather_deferreds([], errback_if_any_fail=True)
        self.assertCallbackEqual(d, [])
        da = Deferred()
        db = Deferred()
        dc = Deferred()
        d = gather_deferreds([da, db, dc], errback_if_any_fail=True)
        dc.callback('c')
        db.callback('b')
        da.callback('a')
        self.assertCallbackEqual(d, [
            (True, ('a',), {}),
            (True, ('b',), {}),
            (True, ('c',), {})])
        # Any failure routes the aggregate result to the errback instead.
        da = Deferred()
        db = Deferred()
        dc = Deferred()
        df = Deferred()
        d = gather_deferreds([da, db, dc, df], errback_if_any_fail=True)
        df.errback()
        dc.callback('c')
        db.callback('b')
        da.callback('a')
        self.assertErrbackEqual(d, [
            (True, ('a',), {}),
            (True, ('b',), {}),
            (True, ('c',), {}),
            (False, (), {})])
        # Same when some deferreds already fired before gathering.
        da = Deferred()
        db = Deferred()
        dc = Deferred()
        df = Deferred()
        df.errback('f')
        dc.callback('c')
        d = gather_deferreds([da, db, dc, df], errback_if_any_fail=True)
        db.callback('b')
        da.callback('a')
        self.assertErrbackEqual(d, [
            (True, ('a',), {}),
            (True, ('b',), {}),
            (True, ('c',), {}),
            (False, ('f',), {})])
# Allow running the suite directly (uses unittest2's runner).
if __name__ == '__main__':
    unittest2.main()
| 24.819728
| 68
| 0.571605
| 945
| 7,297
| 4.306878
| 0.093122
| 0.136364
| 0.097297
| 0.088452
| 0.84914
| 0.839312
| 0.835381
| 0.808108
| 0.774939
| 0.697297
| 0
| 0.012593
| 0.227354
| 7,297
| 293
| 69
| 24.904437
| 0.709294
| 0.008771
| 0
| 0.866397
| 0
| 0
| 0.043239
| 0
| 0
| 0
| 0
| 0
| 0.295547
| 0
| null | null | 0
| 0.012146
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3e6b2d2b9c9edc49e92a65a91d93bd4b2b739fe7
| 298,943
|
py
|
Python
|
tests/unit/gapic/compute_v1/test_instances.py
|
georgiyekkert/python-compute
|
d128efbb3bf10af9b41e55b20aaa8080b3221e77
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/compute_v1/test_instances.py
|
georgiyekkert/python-compute
|
d128efbb3bf10af9b41e55b20aaa8080b3221e77
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/compute_v1/test_instances.py
|
georgiyekkert/python-compute
|
d128efbb3bf10af9b41e55b20aaa8080b3221e77
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests import Request
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.instances import InstancesClient
from google.cloud.compute_v1.services.instances import pagers
from google.cloud.compute_v1.services.instances import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
    """Dummy mTLS client-cert source returning a (cert, key) bytes pair."""
    cert_bytes = b"cert bytes"
    key_bytes = b"key bytes"
    return cert_bytes, key_bytes
# If the default endpoint is localhost, the default mTLS endpoint would be
# identical; substitute a distinct host so the client's endpoint-selection
# logic can be exercised in tests.
def modify_default_endpoint(client):
    """Return a non-localhost stand-in for *client*'s DEFAULT_ENDPOINT."""
    default = client.DEFAULT_ENDPOINT
    if "localhost" in default:
        return "foo.googleapis.com"
    return default
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com hosts to their mTLS
    variant and leaves None / already-mTLS / non-Google hosts unchanged."""
    mtls = InstancesClient._get_default_mtls_endpoint
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"
    # Table of (input, expected) pairs covering every branch.
    expectations = [
        (None, None),
        (api_endpoint, api_mtls_endpoint),
        (api_mtls_endpoint, api_mtls_endpoint),
        (sandbox_endpoint, sandbox_mtls_endpoint),
        (sandbox_mtls_endpoint, sandbox_mtls_endpoint),
        (non_googleapi, non_googleapi),
    ]
    for given, expected in expectations:
        assert mtls(given) == expected
@pytest.mark.parametrize("client_class", [InstancesClient,])
def test_instances_client_from_service_account_info(client_class):
    """from_service_account_info must hand the factory-produced credentials to
    the transport and point it at the default compute endpoint."""
    fake_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as info_factory:
        info_factory.return_value = fake_creds
        client = client_class.from_service_account_info({"valid": True})
        assert isinstance(client, client_class)
        assert client.transport._credentials == fake_creds
        assert client.transport._host == "compute.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name", [(transports.InstancesRestTransport, "rest"),]
)
def test_instances_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """Transports must forward always_use_jwt_access to the credentials:
    True calls with_always_use_jwt_access(True); False leaves it untouched."""
    for jwt_enabled in (True, False):
        with mock.patch.object(
            service_account.Credentials, "with_always_use_jwt_access", create=True
        ) as jwt_patch:
            sa_creds = service_account.Credentials(None, None, None)
            transport_class(credentials=sa_creds, always_use_jwt_access=jwt_enabled)
            if jwt_enabled:
                jwt_patch.assert_called_once_with(True)
            else:
                jwt_patch.assert_not_called()
@pytest.mark.parametrize("client_class", [InstancesClient,])
def test_instances_client_from_service_account_file(client_class):
    """Both from_service_account_file and its from_service_account_json alias
    must attach the mocked credentials; the transport targets the default
    compute endpoint."""
    fake_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as file_factory:
        file_factory.return_value = fake_creds
        for constructor in (
            client_class.from_service_account_file,
            client_class.from_service_account_json,
        ):
            client = constructor("dummy/file/path.json")
            assert client.transport._credentials == fake_creds
            assert isinstance(client, client_class)
        assert client.transport._host == "compute.googleapis.com:443"
def test_instances_client_get_transport_class():
    """get_transport_class returns the REST transport both by default and
    when requested by name."""
    default_transport = InstancesClient.get_transport_class()
    # REST is the only transport this client supports.
    assert default_transport in [transports.InstancesRestTransport]
    named_transport = InstancesClient.get_transport_class("rest")
    assert named_transport == transports.InstancesRestTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(InstancesClient, transports.InstancesRestTransport, "rest"),],
)
@mock.patch.object(
    InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)
)
def test_instances_client_client_options(client_class, transport_class, transport_name):
    """Exercise how client_options and env vars drive transport construction.

    Covers: a pre-built transport instance vs. a transport name, an explicit
    api_endpoint, GOOGLE_API_USE_MTLS_ENDPOINT set to "never"/"always"/an
    unsupported value, an unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE, and
    quota_project_id forwarding.  Transport ``__init__`` is mocked so only
    the constructor arguments are checked, never a real connection.
    """
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(InstancesClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(InstancesClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()
    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (InstancesClient, transports.InstancesRestTransport, "rest", "true"),
        (InstancesClient, transports.InstancesRestTransport, "rest", "false"),
    ],
)
@mock.patch.object(
    InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_instances_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Check endpoint/cert autoswitch when GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Three scenarios: an explicit client_cert_source in client options, an
    ADC default client cert, and no cert at all.  The mTLS endpoint and the
    cert callback must be used only when GOOGLE_API_USE_CLIENT_CERTIFICATE
    is "true"; otherwise the plain default endpoint and no cert are used.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name, client_options=options)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        # Fix: read the endpoint off client_class, not the stale
                        # `client` local leaked from the previous section -- no
                        # client has been constructed yet in this with-block.
                        expected_host = client_class.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client_class.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(InstancesClient, transports.InstancesRestTransport, "rest"),],
)
def test_instances_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """User-supplied OAuth scopes are forwarded verbatim to the transport."""
    requested_scopes = ["1", "2"]
    options = client_options.ClientOptions(scopes=requested_scopes)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        # Only `scopes` deviates from the defaults passed to the transport.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=requested_scopes,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(InstancesClient, transports.InstancesRestTransport, "rest"),],
)
def test_instances_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials_file in client options is forwarded to the transport."""
    creds_path = "credentials.json"
    options = client_options.ClientOptions(credentials_file=creds_path)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        # Only `credentials_file` deviates from the defaults passed through.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=creds_path,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_add_access_config_rest(
    transport: str = "rest", request_type=compute.AddAccessConfigInstanceRequest
):
    """add_access_config over REST returns a fully parsed compute.Operation.

    The requests Session is mocked to return a canned 200 response whose
    JSON body is a fully populated Operation; every field is then asserted
    to have round-tripped through the client's JSON parsing.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["access_config_resource"] = compute.AccessConfig(
        external_ipv6="external_ipv6_value"
    )
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.add_access_config(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_add_access_config_rest_bad_request(
    transport: str = "rest", request_type=compute.AddAccessConfigInstanceRequest
):
    """An HTTP 400 from the API surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies URI transcoding for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "access_config_resource": compute.AccessConfig(
                external_ipv6="external_ipv6_value"
            ),
        }
    )
    # Fake a 400 HTTP response and expect the client to raise BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.add_access_config(request)
def test_add_access_config_rest_from_dict():
    """Re-run the main REST test with a plain dict as the request payload."""
    test_add_access_config_rest("rest", dict)
def test_add_access_config_rest_flattened(transport: str = "rest"):
    """Flattened (keyword) arguments produce a request hitting the right URL.

    Mocks the requests Session, calls add_access_config with flattened
    fields, and validates the final URL against the expected path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            network_interface="network_interface_value",
            access_config_resource=compute.AccessConfig(
                external_ipv6="external_ipv6_value"
            ),
        )
        mock_args.update(sample_request)
        client.add_access_config(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig"
            % client.transport._host,
            args[1],
        )
def test_add_access_config_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_kwargs = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
        access_config_resource=compute.AccessConfig(
            external_ipv6="external_ipv6_value"
        ),
    )
    # A request object alongside flattened fields is rejected by the client.
    with pytest.raises(ValueError):
        client.add_access_config(
            compute.AddAccessConfigInstanceRequest(), **flattened_kwargs
        )
def test_add_resource_policies_rest(
    transport: str = "rest", request_type=compute.AddResourcePoliciesInstanceRequest
):
    """add_resource_policies over REST returns a fully parsed compute.Operation.

    The requests Session is mocked to return a canned 200 response whose
    JSON body is a fully populated Operation; every field is then asserted
    to have round-tripped through the client's JSON parsing.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init[
        "instances_add_resource_policies_request_resource"
    ] = compute.InstancesAddResourcePoliciesRequest(
        resource_policies=["resource_policies_value"]
    )
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.add_resource_policies(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_add_resource_policies_rest_bad_request(
    transport: str = "rest", request_type=compute.AddResourcePoliciesInstanceRequest
):
    """An HTTP 400 from the API surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies URI transcoding for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_add_resource_policies_request_resource": compute.InstancesAddResourcePoliciesRequest(
                resource_policies=["resource_policies_value"]
            ),
        }
    )
    # Fake a 400 HTTP response and expect the client to raise BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.add_resource_policies(request)
def test_add_resource_policies_rest_from_dict():
    """Re-run the main REST test with a plain dict as the request payload."""
    test_add_resource_policies_rest("rest", dict)
def test_add_resource_policies_rest_flattened(transport: str = "rest"):
    """Flattened (keyword) arguments produce a request hitting the right URL.

    Mocks the requests Session, calls add_resource_policies with flattened
    fields, and validates the final URL against the expected path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(
                resource_policies=["resource_policies_value"]
            ),
        )
        mock_args.update(sample_request)
        client.add_resource_policies(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies"
            % client.transport._host,
            args[1],
        )
def test_add_resource_policies_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_kwargs = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(
            resource_policies=["resource_policies_value"]
        ),
    )
    # A request object alongside flattened fields is rejected by the client.
    with pytest.raises(ValueError):
        client.add_resource_policies(
            compute.AddResourcePoliciesInstanceRequest(), **flattened_kwargs
        )
def test_aggregated_list_rest(
    transport: str = "rest", request_type=compute.AggregatedListInstancesRequest
):
    """aggregated_list over REST returns an AggregatedListPager.

    The requests Session is mocked to return a canned InstanceAggregatedList;
    the pager's surface fields are asserted to have round-tripped.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceAggregatedList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
            unreachables=["unreachables_value"],
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceAggregatedList.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.aggregated_list(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AggregatedListPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
    assert response.unreachables == ["unreachables_value"]
def test_aggregated_list_rest_bad_request(
    transport: str = "rest", request_type=compute.AggregatedListInstancesRequest
):
    """An HTTP 400 from the API surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A minimal request that satisfies URI transcoding for this method.
    request = request_type({"project": "sample1"})
    # Fake a 400 HTTP response and expect the client to raise BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.aggregated_list(request)
def test_aggregated_list_rest_from_dict():
    """Re-run the main REST test with a plain dict as the request payload."""
    test_aggregated_list_rest("rest", dict)
def test_aggregated_list_rest_flattened(transport: str = "rest"):
    """Flattened (keyword) arguments produce a request hitting the right URL.

    Mocks the requests Session, calls aggregated_list with a flattened
    project argument, and validates the final URL against the template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceAggregatedList()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceAggregatedList.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(project="project_value",)
        mock_args.update(sample_request)
        client.aggregated_list(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/aggregated/instances"
            % client.transport._host,
            args[1],
        )
def test_aggregated_list_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request object alongside flattened fields is rejected by the client.
    request_obj = compute.AggregatedListInstancesRequest()
    with pytest.raises(ValueError):
        client.aggregated_list(request_obj, project="project_value")
def test_aggregated_list_rest_pager():
    """Exercise the AggregatedListPager over a fake multi-page response stream.

    Feeds four pages (served twice) through a mocked Session and checks
    item lookup by key, flattened iteration as (key, scoped-list) tuples,
    and per-page next_page_token values.
    """
    client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.InstanceAggregatedList(
                items={
                    "a": compute.InstancesScopedList(),
                    "b": compute.InstancesScopedList(),
                    "c": compute.InstancesScopedList(),
                },
                next_page_token="abc",
            ),
            compute.InstanceAggregatedList(items={}, next_page_token="def",),
            compute.InstanceAggregatedList(
                items={"g": compute.InstancesScopedList(),}, next_page_token="ghi",
            ),
            compute.InstanceAggregatedList(
                items={
                    "h": compute.InstancesScopedList(),
                    "i": compute.InstancesScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.InstanceAggregatedList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        req.side_effect = return_values
        sample_request = {"project": "sample1"}
        pager = client.aggregated_list(request=sample_request)
        # "a" lives on the first page; "h" is on the last, not yet fetched.
        assert isinstance(pager.get("a"), compute.InstancesScopedList)
        assert pager.get("h") is None
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, tuple) for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (str, compute.InstancesScopedList)
        # After exhausting the pager, only the final page is cached.
        assert pager.get("a") is None
        assert isinstance(pager.get("h"), compute.InstancesScopedList)
        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_attach_disk_rest(
    transport: str = "rest", request_type=compute.AttachDiskInstanceRequest
):
    """attach_disk over REST returns a fully parsed compute.Operation.

    The requests Session is mocked to return a canned 200 response whose
    JSON body is a fully populated Operation; every field is then asserted
    to have round-tripped through the client's JSON parsing.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["attached_disk_resource"] = compute.AttachedDisk(auto_delete=True)
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.attach_disk(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_attach_disk_rest_bad_request(
    transport: str = "rest", request_type=compute.AttachDiskInstanceRequest
):
    """An HTTP 400 from the API surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies URI transcoding for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "attached_disk_resource": compute.AttachedDisk(auto_delete=True),
        }
    )
    # Fake a 400 HTTP response and expect the client to raise BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.attach_disk(request)
def test_attach_disk_rest_from_dict():
    """Re-run the main REST test with a plain dict as the request payload."""
    test_attach_disk_rest("rest", dict)
def test_attach_disk_rest_flattened(transport: str = "rest"):
    """Flattened (keyword) arguments produce a request hitting the right URL.

    Mocks the requests Session, calls attach_disk with flattened fields,
    and validates the final URL against the expected path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            attached_disk_resource=compute.AttachedDisk(auto_delete=True),
        )
        mock_args.update(sample_request)
        client.attach_disk(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk"
            % client.transport._host,
            args[1],
        )
def test_attach_disk_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_kwargs = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        attached_disk_resource=compute.AttachedDisk(auto_delete=True),
    )
    # A request object alongside flattened fields is rejected by the client.
    with pytest.raises(ValueError):
        client.attach_disk(compute.AttachDiskInstanceRequest(), **flattened_kwargs)
def test_bulk_insert_rest(
    transport: str = "rest", request_type=compute.BulkInsertInstanceRequest
):
    """bulk_insert should deserialize a mocked Operation and expose every field.

    Also reused by the ``*_from_dict`` variant with ``request_type=dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request_init[
        "bulk_insert_instance_resource_resource"
    ] = compute.BulkInsertInstanceResource(count=553)
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.bulk_insert(request)
    # Establish that the response is the type that we expect.
    # Every field set above must survive the JSON round trip unchanged.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_bulk_insert_rest_bad_request(
    transport: str = "rest", request_type=compute.BulkInsertInstanceRequest
):
    """bulk_insert must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "bulk_insert_instance_resource_resource": compute.BulkInsertInstanceResource(
                count=553
            ),
        }
    )
    # Patch the session to hand back a 400 and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.bulk_insert(request)
def test_bulk_insert_rest_from_dict():
    """Re-run the bulk_insert REST test using a plain dict request body."""
    test_bulk_insert_rest(request_type=dict)
def test_bulk_insert_rest_flattened(transport: str = "rest"):
    """bulk_insert called with flattened args must hit the bulkInsert URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty Operation, serialized to JSON, as a 200 response.
        resp = Response()
        resp.status_code = 200
        resp._content = compute.Operation.to_json(compute.Operation()).encode("UTF-8")
        req.return_value = resp
        # Path parameters that satisfy this method's http rule.
        path_params = {"project": "sample1", "zone": "sample2"}
        # Truthy values for every flattened field; path params win on overlap,
        # matching the original mock_args.update(sample_request) semantics.
        call_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "bulk_insert_instance_resource_resource": compute.BulkInsertInstanceResource(
                count=553
            ),
            **path_params,
        }
        client.bulk_insert(**call_kwargs)
        # Exactly one underlying HTTP call, targeting the transcoded URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_bulk_insert_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(
            count=553
        ),
    )
    # Supplying both a request object and flattened fields is ambiguous.
    with pytest.raises(ValueError):
        client.bulk_insert(compute.BulkInsertInstanceRequest(), **flattened_fields)
def test_delete_rest(
    transport: str = "rest", request_type=compute.DeleteInstanceRequest
):
    """delete should deserialize a mocked Operation and expose every field.

    Also reused by the ``*_from_dict`` variant with ``request_type=dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.delete(request)
    # Establish that the response is the type that we expect.
    # Every field set above must survive the JSON round trip unchanged.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_delete_rest_bad_request(
    transport: str = "rest", request_type=compute.DeleteInstanceRequest
):
    """delete must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the session to hand back a 400 and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.delete(request)
def test_delete_rest_from_dict():
    """Re-run the delete REST test using a plain dict request body."""
    test_delete_rest(request_type=dict)
def test_delete_rest_flattened(transport: str = "rest"):
    """delete called with flattened args must hit the instance URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty Operation, serialized to JSON, as a 200 response.
        resp = Response()
        resp.status_code = 200
        resp._content = compute.Operation.to_json(compute.Operation()).encode("UTF-8")
        req.return_value = resp
        # Path parameters that satisfy this method's http rule.
        path_params = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on overlap,
        # matching the original mock_args.update(sample_request) semantics.
        call_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            **path_params,
        }
        client.delete(**call_kwargs)
        # Exactly one underlying HTTP call, targeting the transcoded URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_delete_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value", zone="zone_value", instance="instance_value",
    )
    # Supplying both a request object and flattened fields is ambiguous.
    with pytest.raises(ValueError):
        client.delete(compute.DeleteInstanceRequest(), **flattened_fields)
def test_delete_access_config_rest(
    transport: str = "rest", request_type=compute.DeleteAccessConfigInstanceRequest
):
    """delete_access_config should deserialize a mocked Operation fully.

    Also reused by the ``*_from_dict`` variant with ``request_type=dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.delete_access_config(request)
    # Establish that the response is the type that we expect.
    # Every field set above must survive the JSON round trip unchanged.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_delete_access_config_rest_bad_request(
    transport: str = "rest", request_type=compute.DeleteAccessConfigInstanceRequest
):
    """delete_access_config must surface an HTTP 400 as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the session to hand back a 400 and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.delete_access_config(request)
def test_delete_access_config_rest_from_dict():
    """Re-run the delete_access_config REST test with a dict request body."""
    test_delete_access_config_rest(request_type=dict)
def test_delete_access_config_rest_flattened(transport: str = "rest"):
    """Flattened delete_access_config must hit the deleteAccessConfig URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty Operation, serialized to JSON, as a 200 response.
        resp = Response()
        resp.status_code = 200
        resp._content = compute.Operation.to_json(compute.Operation()).encode("UTF-8")
        req.return_value = resp
        # Path parameters that satisfy this method's http rule.
        path_params = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on overlap,
        # matching the original mock_args.update(sample_request) semantics.
        call_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "access_config": "access_config_value",
            "network_interface": "network_interface_value",
            **path_params,
        }
        client.delete_access_config(**call_kwargs)
        # Exactly one underlying HTTP call, targeting the transcoded URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_delete_access_config_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        access_config="access_config_value",
        network_interface="network_interface_value",
    )
    # Supplying both a request object and flattened fields is ambiguous.
    with pytest.raises(ValueError):
        client.delete_access_config(
            compute.DeleteAccessConfigInstanceRequest(), **flattened_fields
        )
def test_detach_disk_rest(
    transport: str = "rest", request_type=compute.DetachDiskInstanceRequest
):
    """detach_disk should deserialize a mocked Operation and expose every field.

    Also reused by the ``*_from_dict`` variant with ``request_type=dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.detach_disk(request)
    # Establish that the response is the type that we expect.
    # Every field set above must survive the JSON round trip unchanged.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_detach_disk_rest_bad_request(
    transport: str = "rest", request_type=compute.DetachDiskInstanceRequest
):
    """detach_disk must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the session to hand back a 400 and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.detach_disk(request)
def test_detach_disk_rest_from_dict():
    """Re-run the detach_disk REST test using a plain dict request body."""
    test_detach_disk_rest(request_type=dict)
def test_detach_disk_rest_flattened(transport: str = "rest"):
    """detach_disk called with flattened args must hit the detachDisk URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty Operation, serialized to JSON, as a 200 response.
        resp = Response()
        resp.status_code = 200
        resp._content = compute.Operation.to_json(compute.Operation()).encode("UTF-8")
        req.return_value = resp
        # Path parameters that satisfy this method's http rule.
        path_params = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on overlap,
        # matching the original mock_args.update(sample_request) semantics.
        call_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "device_name": "device_name_value",
            **path_params,
        }
        client.detach_disk(**call_kwargs)
        # Exactly one underlying HTTP call, targeting the transcoded URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_detach_disk_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        device_name="device_name_value",
    )
    # Supplying both a request object and flattened fields is ambiguous.
    with pytest.raises(ValueError):
        client.detach_disk(compute.DetachDiskInstanceRequest(), **flattened_fields)
def test_get_rest(transport: str = "rest", request_type=compute.GetInstanceRequest):
    """get should deserialize a mocked Instance and expose every field.

    Also reused by the ``*_from_dict`` variant with ``request_type=dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Instance(
            can_ip_forward=True,
            cpu_platform="cpu_platform_value",
            creation_timestamp="creation_timestamp_value",
            deletion_protection=True,
            description="description_value",
            fingerprint="fingerprint_value",
            hostname="hostname_value",
            id=205,
            kind="kind_value",
            label_fingerprint="label_fingerprint_value",
            last_start_timestamp="last_start_timestamp_value",
            last_stop_timestamp="last_stop_timestamp_value",
            last_suspended_timestamp="last_suspended_timestamp_value",
            machine_type="machine_type_value",
            min_cpu_platform="min_cpu_platform_value",
            name="name_value",
            private_ipv6_google_access=compute.Instance.PrivateIpv6GoogleAccess.ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE,
            resource_policies=["resource_policies_value"],
            satisfies_pzs=True,
            self_link="self_link_value",
            start_restricted=True,
            status=compute.Instance.Status.DEPROVISIONING,
            status_message="status_message_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Instance.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get(request)
    # Establish that the response is the type that we expect.
    # Every field set above must survive the JSON round trip unchanged.
    assert isinstance(response, compute.Instance)
    assert response.can_ip_forward is True
    assert response.cpu_platform == "cpu_platform_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.deletion_protection is True
    assert response.description == "description_value"
    assert response.fingerprint == "fingerprint_value"
    assert response.hostname == "hostname_value"
    assert response.id == 205
    assert response.kind == "kind_value"
    assert response.label_fingerprint == "label_fingerprint_value"
    assert response.last_start_timestamp == "last_start_timestamp_value"
    assert response.last_stop_timestamp == "last_stop_timestamp_value"
    assert response.last_suspended_timestamp == "last_suspended_timestamp_value"
    assert response.machine_type == "machine_type_value"
    assert response.min_cpu_platform == "min_cpu_platform_value"
    assert response.name == "name_value"
    assert (
        response.private_ipv6_google_access
        == compute.Instance.PrivateIpv6GoogleAccess.ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE
    )
    assert response.resource_policies == ["resource_policies_value"]
    assert response.satisfies_pzs is True
    assert response.self_link == "self_link_value"
    assert response.start_restricted is True
    assert response.status == compute.Instance.Status.DEPROVISIONING
    assert response.status_message == "status_message_value"
    assert response.zone == "zone_value"
def test_get_rest_bad_request(
    transport: str = "rest", request_type=compute.GetInstanceRequest
):
    """get must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the session to hand back a 400 and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get(request)
def test_get_rest_from_dict():
    """Re-run the get REST test using a plain dict request body."""
    test_get_rest(request_type=dict)
def test_get_rest_flattened(transport: str = "rest"):
    """get called with flattened args must hit the instance URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty Instance, serialized to JSON, as a 200 response.
        resp = Response()
        resp.status_code = 200
        resp._content = compute.Instance.to_json(compute.Instance()).encode("UTF-8")
        req.return_value = resp
        # Path parameters that satisfy this method's http rule.
        path_params = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on overlap,
        # matching the original mock_args.update(sample_request) semantics.
        call_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            **path_params,
        }
        client.get(**call_kwargs)
        # Exactly one underlying HTTP call, targeting the transcoded URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_get_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value", zone="zone_value", instance="instance_value",
    )
    # Supplying both a request object and flattened fields is ambiguous.
    with pytest.raises(ValueError):
        client.get(compute.GetInstanceRequest(), **flattened_fields)
def test_get_effective_firewalls_rest(
    transport: str = "rest", request_type=compute.GetEffectiveFirewallsInstanceRequest
):
    """get_effective_firewalls should deserialize into the expected type."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty response message, serialized to JSON, as a 200.
        resp = Response()
        resp.status_code = 200
        payload = compute.InstancesGetEffectiveFirewallsResponse.to_json(
            compute.InstancesGetEffectiveFirewallsResponse()
        )
        resp._content = payload.encode("UTF-8")
        req.return_value = resp
        response = client.get_effective_firewalls(request)
    # The client must deserialize into the method's response type.
    assert isinstance(response, compute.InstancesGetEffectiveFirewallsResponse)
def test_get_effective_firewalls_rest_bad_request(
    transport: str = "rest", request_type=compute.GetEffectiveFirewallsInstanceRequest
):
    """get_effective_firewalls must surface an HTTP 400 as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the session to hand back a 400 and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get_effective_firewalls(request)
def test_get_effective_firewalls_rest_from_dict():
    """Re-run the get_effective_firewalls REST test with a dict request body."""
    test_get_effective_firewalls_rest(request_type=dict)
def test_get_effective_firewalls_rest_flattened(transport: str = "rest"):
    """Flattened get_effective_firewalls must hit the getEffectiveFirewalls URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve an empty response message, serialized to JSON, as a 200.
        resp = Response()
        resp.status_code = 200
        payload = compute.InstancesGetEffectiveFirewallsResponse.to_json(
            compute.InstancesGetEffectiveFirewallsResponse()
        )
        resp._content = payload.encode("UTF-8")
        req.return_value = resp
        # Path parameters that satisfy this method's http rule.
        path_params = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on overlap,
        # matching the original mock_args.update(sample_request) semantics.
        call_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "network_interface": "network_interface_value",
            **path_params,
        }
        client.get_effective_firewalls(**call_kwargs)
        # Exactly one underlying HTTP call, targeting the transcoded URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_get_effective_firewalls_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
    )
    # Supplying both a request object and flattened fields is ambiguous.
    with pytest.raises(ValueError):
        client.get_effective_firewalls(
            compute.GetEffectiveFirewallsInstanceRequest(), **flattened_fields
        )
def test_get_guest_attributes_rest(
    transport: str = "rest", request_type=compute.GetGuestAttributesInstanceRequest
):
    """get_guest_attributes should round-trip every GuestAttributes field."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy URL transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Patch the transport session so no real HTTP request is made.
    with mock.patch.object(Session, "request") as req:
        # Serve a populated GuestAttributes, serialized to JSON, as a 200.
        expected = compute.GuestAttributes(
            kind="kind_value",
            query_path="query_path_value",
            self_link="self_link_value",
            variable_key="variable_key_value",
            variable_value="variable_value_value",
        )
        resp = Response()
        resp.status_code = 200
        resp._content = compute.GuestAttributes.to_json(expected).encode("UTF-8")
        req.return_value = resp
        response = client.get_guest_attributes(request)
    # Every populated field must survive the JSON round trip unchanged.
    assert isinstance(response, compute.GuestAttributes)
    assert response.kind == "kind_value"
    assert response.query_path == "query_path_value"
    assert response.self_link == "self_link_value"
    assert response.variable_key == "variable_key_value"
    assert response.variable_value == "variable_value_value"
def test_get_guest_attributes_rest_bad_request(
    transport: str = "rest", request_type=compute.GetGuestAttributesInstanceRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_guest_attributes(request)
def test_get_guest_attributes_rest_from_dict():
    """Re-run the GetGuestAttributes test with a plain dict as the request."""
    request_cls = dict
    test_get_guest_attributes_rest(request_type=request_cls)
def test_get_guest_attributes_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected getGuestAttributes URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.GuestAttributes()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.GuestAttributes.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_guest_attributes(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes"
            % client.transport._host,
            args[1],
        )
def test_get_guest_attributes_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.GetGuestAttributesInstanceRequest()
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.get_guest_attributes(
            request,
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_get_iam_policy_rest(
    transport: str = "rest", request_type=compute.GetIamPolicyInstanceRequest
):
    """get_iam_policy parses a mocked HTTP body into a compute.Policy."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Policy.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_iam_policy(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Policy)
    assert response.etag == "etag_value"
    assert response.iam_owned is True
    assert response.version == 774
def test_get_iam_policy_rest_bad_request(
    transport: str = "rest", request_type=compute.GetIamPolicyInstanceRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_iam_policy(request)
def test_get_iam_policy_rest_from_dict():
    """Re-run the GetIamPolicy test with a plain dict as the request."""
    request_cls = dict
    test_get_iam_policy_rest(request_type=request_cls)
def test_get_iam_policy_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected getIamPolicy URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Policy.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", resource="resource_value",
        )
        mock_args.update(sample_request)
        client.get_iam_policy(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy"
            % client.transport._host,
            args[1],
        )
def test_get_iam_policy_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.GetIamPolicyInstanceRequest()
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.get_iam_policy(
            request,
            project="project_value",
            zone="zone_value",
            resource="resource_value",
        )
def test_get_screenshot_rest(
    transport: str = "rest", request_type=compute.GetScreenshotInstanceRequest
):
    """get_screenshot parses a mocked HTTP body into a compute.Screenshot."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Screenshot(contents="contents_value", kind="kind_value",)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Screenshot.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_screenshot(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Screenshot)
    assert response.contents == "contents_value"
    assert response.kind == "kind_value"
def test_get_screenshot_rest_bad_request(
    transport: str = "rest", request_type=compute.GetScreenshotInstanceRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_screenshot(request)
def test_get_screenshot_rest_from_dict():
    """Re-run the GetScreenshot test with a plain dict as the request."""
    request_cls = dict
    test_get_screenshot_rest(request_type=request_cls)
def test_get_screenshot_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected screenshot URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Screenshot()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Screenshot.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_screenshot(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot"
            % client.transport._host,
            args[1],
        )
def test_get_screenshot_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.GetScreenshotInstanceRequest()
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.get_screenshot(
            request,
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_get_serial_port_output_rest(
    transport: str = "rest", request_type=compute.GetSerialPortOutputInstanceRequest
):
    """get_serial_port_output parses a mocked body into SerialPortOutput."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SerialPortOutput(
            contents="contents_value",
            kind="kind_value",
            next_=542,
            self_link="self_link_value",
            start=558,
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.SerialPortOutput.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_serial_port_output(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.SerialPortOutput)
    assert response.contents == "contents_value"
    assert response.kind == "kind_value"
    assert response.next_ == 542
    assert response.self_link == "self_link_value"
    assert response.start == 558
def test_get_serial_port_output_rest_bad_request(
    transport: str = "rest", request_type=compute.GetSerialPortOutputInstanceRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_serial_port_output(request)
def test_get_serial_port_output_rest_from_dict():
    """Re-run the GetSerialPortOutput test with a plain dict as the request."""
    request_cls = dict
    test_get_serial_port_output_rest(request_type=request_cls)
def test_get_serial_port_output_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected serialPort URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SerialPortOutput()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.SerialPortOutput.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_serial_port_output(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort"
            % client.transport._host,
            args[1],
        )
def test_get_serial_port_output_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.GetSerialPortOutputInstanceRequest()
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.get_serial_port_output(
            request,
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_get_shielded_instance_identity_rest(
    transport: str = "rest",
    request_type=compute.GetShieldedInstanceIdentityInstanceRequest,
):
    """get_shielded_instance_identity parses a mocked body into ShieldedInstanceIdentity."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ShieldedInstanceIdentity(kind="kind_value",)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_shielded_instance_identity(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.ShieldedInstanceIdentity)
    assert response.kind == "kind_value"
def test_get_shielded_instance_identity_rest_bad_request(
    transport: str = "rest",
    request_type=compute.GetShieldedInstanceIdentityInstanceRequest,
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_shielded_instance_identity(request)
def test_get_shielded_instance_identity_rest_from_dict():
    """Re-run the GetShieldedInstanceIdentity test with a plain dict request."""
    request_cls = dict
    test_get_shielded_instance_identity_rest(request_type=request_cls)
def test_get_shielded_instance_identity_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected getShieldedInstanceIdentity URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ShieldedInstanceIdentity()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_shielded_instance_identity(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity"
            % client.transport._host,
            args[1],
        )
def test_get_shielded_instance_identity_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.GetShieldedInstanceIdentityInstanceRequest()
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.get_shielded_instance_identity(
            request,
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_insert_rest(
    transport: str = "rest", request_type=compute.InsertInstanceRequest
):
    """insert parses a mocked HTTP body into a compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request_init["instance_resource"] = compute.Instance(
        advanced_machine_features=compute.AdvancedMachineFeatures(
            enable_nested_virtualization=True
        )
    )
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.insert(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_insert_rest_bad_request(
    transport: str = "rest", request_type=compute.InsertInstanceRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request_init["instance_resource"] = compute.Instance(
        advanced_machine_features=compute.AdvancedMachineFeatures(
            enable_nested_virtualization=True
        )
    )
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)
def test_insert_rest_from_dict():
    """Re-run the Insert test with a plain dict as the request."""
    request_cls = dict
    test_insert_rest(request_type=request_cls)
def test_insert_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected instances insert URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "zone": "sample2"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance_resource=compute.Instance(
                advanced_machine_features=compute.AdvancedMachineFeatures(
                    enable_nested_virtualization=True
                )
            ),
        )
        mock_args.update(sample_request)
        client.insert(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances"
            % client.transport._host,
            args[1],
        )
def test_insert_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.InsertInstanceRequest()
    resource = compute.Instance(
        advanced_machine_features=compute.AdvancedMachineFeatures(
            enable_nested_virtualization=True
        )
    )
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.insert(
            request,
            project="project_value",
            zone="zone_value",
            instance_resource=resource,
        )
def test_list_rest(transport: str = "rest", request_type=compute.ListInstancesRequest):
    """list parses a mocked HTTP body and returns a ListPager over it."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceList.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.list(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
def test_list_rest_bad_request(
    transport: str = "rest", request_type=compute.ListInstancesRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)
def test_list_rest_from_dict():
    """Re-run the List test with a plain dict as the request."""
    request_cls = dict
    test_list_rest(request_type=request_cls)
def test_list_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected instances list URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceList()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceList.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "zone": "sample2"}
        # get truthy value for each flattened field
        mock_args = dict(project="project_value", zone="zone_value",)
        mock_args.update(sample_request)
        client.list(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances"
            % client.transport._host,
            args[1],
        )
def test_list_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.ListInstancesRequest()
    # Supplying both a request object and flattened keyword fields is rejected.
    with pytest.raises(ValueError):
        client.list(
            request, project="project_value", zone="zone_value",
        )
def test_list_rest_pager():
    """list() iterates across multiple mocked pages and exposes raw_page tokens."""
    client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.InstanceList(
                items=[compute.Instance(), compute.Instance(), compute.Instance(),],
                next_page_token="abc",
            ),
            compute.InstanceList(items=[], next_page_token="def",),
            compute.InstanceList(items=[compute.Instance(),], next_page_token="ghi",),
            compute.InstanceList(items=[compute.Instance(), compute.Instance(),],),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.InstanceList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        req.side_effect = return_values
        sample_request = {"project": "sample1", "zone": "sample2"}
        pager = client.list(request=sample_request)
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.Instance) for i in results)
        pages = list(client.list(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_list_referrers_rest(
    transport: str = "rest", request_type=compute.ListReferrersInstancesRequest
):
    """list_referrers parses a mocked body and returns a ListReferrersPager."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceListReferrers(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceListReferrers.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.list_referrers(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListReferrersPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
def test_list_referrers_rest_bad_request(
    transport: str = "rest", request_type=compute.ListReferrersInstancesRequest
):
    """An HTTP 400 surfaces to the caller as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_referrers(request)
def test_list_referrers_rest_from_dict():
    """Re-run the ListReferrers test with a plain dict as the request."""
    request_cls = dict
    test_list_referrers_rest(request_type=request_cls)
def test_list_referrers_rest_flattened(transport: str = "rest"):
    """Flattened args are transcoded into the expected referrers URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceListReferrers()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceListReferrers.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.list_referrers(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers"
            % client.transport._host,
            args[1],
        )
def test_list_referrers_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.list_referrers(
            compute.ListReferrersInstancesRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_list_referrers_rest_pager():
    """The pager yields items across pages and exposes per-page tokens."""
    client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(Session, "request") as req:
        # Three pages with tokens followed by a final page without one; the
        # sequence is doubled because the pager is drained twice below.
        pages = [
            compute.InstanceListReferrers(
                items=[compute.Reference(), compute.Reference(), compute.Reference()],
                next_page_token="abc",
            ),
            compute.InstanceListReferrers(items=[], next_page_token="def"),
            compute.InstanceListReferrers(
                items=[compute.Reference()], next_page_token="ghi"
            ),
            compute.InstanceListReferrers(
                items=[compute.Reference(), compute.Reference()]
            ),
        ]
        pages = pages + pages
        # Wrap each page as a 200 JSON response for the mocked session.
        canned_responses = []
        for page in pages:
            resp = Response()
            resp.status_code = 200
            resp._content = compute.InstanceListReferrers.to_json(page).encode("UTF-8")
            canned_responses.append(resp)
        req.side_effect = canned_responses
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Draining the pager yields every item from every page (3 + 0 + 1 + 2).
        results = list(client.list_referrers(request=sample_request))
        assert len(results) == 6
        assert all(isinstance(item, compute.Reference) for item in results)
        # Each page surfaces its raw next_page_token.
        page_list = list(client.list_referrers(request=sample_request).pages)
        for page_, token in zip(page_list, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_remove_resource_policies_rest(
    transport: str = "rest", request_type=compute.RemoveResourcePoliciesInstanceRequest
):
    """remove_resource_policies() decodes a 200 response into compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request with path params that satisfy transcoding plus the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init[
        "instances_remove_resource_policies_request_resource"
    ] = compute.InstancesRemoveResourcePoliciesRequest(
        resource_policies=["resource_policies_value"]
    )
    request = request_type(request_init)
    # Every scalar field of the expected Operation, reused for asserts below.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    with mock.patch.object(Session, "request") as req:
        # Serve the canned Operation as a 200 JSON payload.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = canned
        response = client.remove_resource_policies(request)
    # The client must deserialize the payload back into an Operation whose
    # fields round-trip exactly.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_remove_resource_policies_rest_bad_request(
    transport: str = "rest", request_type=compute.RemoveResourcePoliciesInstanceRequest
):
    """An HTTP 400 from the transport surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request with path params that satisfy transcoding plus the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init[
        "instances_remove_resource_policies_request_resource"
    ] = compute.InstancesRemoveResourcePoliciesRequest(
        resource_policies=["resource_policies_value"]
    )
    request = request_type(request_init)
    # Stub the session to return a 400 and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        failed_response = Response()
        failed_response.status_code = 400
        failed_response.request = Request()
        req.return_value = failed_response
        client.remove_resource_policies(request)
def test_remove_resource_policies_rest_from_dict():
    # Re-run the happy-path test with a plain dict request to confirm the
    # client accepts dict-typed requests in addition to proto-plus objects.
    test_remove_resource_policies_rest(request_type=dict)
def test_remove_resource_policies_rest_flattened(transport: str = "rest"):
    """Flattened kwargs (including the body resource) reach the right URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response
        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for each flattened field, overridden by the samples.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(
                resource_policies=["resource_policies_value"]
            ),
        )
        mock_args.update(sample_request)
        client.remove_resource_policies(**mock_args)
        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies"
            % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_remove_resource_policies_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.remove_resource_policies(
            compute.RemoveResourcePoliciesInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(
                resource_policies=["resource_policies_value"]
            ),
        )
def test_reset_rest(transport: str = "rest", request_type=compute.ResetInstanceRequest):
    """reset() decodes a successful REST response into compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Every scalar field of the expected Operation, reused for asserts below.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    with mock.patch.object(Session, "request") as req:
        # Serve the canned Operation as a 200 JSON payload.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = canned
        response = client.reset(request)
    # The client must deserialize the payload back into an Operation whose
    # fields round-trip exactly.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_reset_rest_bad_request(
    transport: str = "rest", request_type=compute.ResetInstanceRequest
):
    """An HTTP 400 from the transport surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Stub the session to return a 400 and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        failed_response = Response()
        failed_response.status_code = 400
        failed_response.request = Request()
        req.return_value = failed_response
        client.reset(request)
def test_reset_rest_from_dict():
    # Re-run the happy-path test with a plain dict request to confirm the
    # client accepts dict-typed requests in addition to proto-plus objects.
    test_reset_rest(request_type=dict)
def test_reset_rest_flattened(transport: str = "rest"):
    """Flattened keyword arguments are transcoded into the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response
        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for each flattened field, overridden by the samples.
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.reset(**mock_args)
        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset"
            % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_reset_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.reset(
            compute.ResetInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_send_diagnostic_interrupt_rest(
    transport: str = "rest", request_type=compute.SendDiagnosticInterruptInstanceRequest
):
    """send_diagnostic_interrupt() decodes a 200 response into its response type."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    with mock.patch.object(Session, "request") as req:
        # Serve an empty response message as a 200 JSON payload.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.SendDiagnosticInterruptInstanceResponse.to_json(
            compute.SendDiagnosticInterruptInstanceResponse()
        ).encode("UTF-8")
        req.return_value = canned
        response = client.send_diagnostic_interrupt(request)
    # The client must deserialize the payload into the expected response type.
    assert isinstance(response, compute.SendDiagnosticInterruptInstanceResponse)
def test_send_diagnostic_interrupt_rest_bad_request(
    transport: str = "rest", request_type=compute.SendDiagnosticInterruptInstanceRequest
):
    """An HTTP 400 from the transport surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Stub the session to return a 400 and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        failed_response = Response()
        failed_response.status_code = 400
        failed_response.request = Request()
        req.return_value = failed_response
        client.send_diagnostic_interrupt(request)
def test_send_diagnostic_interrupt_rest_from_dict():
    # Re-run the happy-path test with a plain dict request to confirm the
    # client accepts dict-typed requests in addition to proto-plus objects.
    test_send_diagnostic_interrupt_rest(request_type=dict)
def test_send_diagnostic_interrupt_rest_flattened(transport: str = "rest"):
    """Flattened keyword arguments are transcoded into the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.SendDiagnosticInterruptInstanceResponse.to_json(
            compute.SendDiagnosticInterruptInstanceResponse()
        ).encode("UTF-8")
        req.return_value = fake_response
        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for each flattened field, overridden by the samples.
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.send_diagnostic_interrupt(**mock_args)
        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt"
            % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_send_diagnostic_interrupt_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.send_diagnostic_interrupt(
            compute.SendDiagnosticInterruptInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_set_deletion_protection_rest(
    transport: str = "rest", request_type=compute.SetDeletionProtectionInstanceRequest
):
    """set_deletion_protection() decodes a 200 response into compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding (note: "resource" here,
    # not "instance", matching this method's http rule).
    request = request_type(
        {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    )
    # Every scalar field of the expected Operation, reused for asserts below.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    with mock.patch.object(Session, "request") as req:
        # Serve the canned Operation as a 200 JSON payload.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = canned
        response = client.set_deletion_protection(request)
    # The client must deserialize the payload back into an Operation whose
    # fields round-trip exactly.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_deletion_protection_rest_bad_request(
    transport: str = "rest", request_type=compute.SetDeletionProtectionInstanceRequest
):
    """An HTTP 400 from the transport surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    )
    # Stub the session to return a 400 and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        failed_response = Response()
        failed_response.status_code = 400
        failed_response.request = Request()
        req.return_value = failed_response
        client.set_deletion_protection(request)
def test_set_deletion_protection_rest_from_dict():
    # Re-run the happy-path test with a plain dict request to confirm the
    # client accepts dict-typed requests in addition to proto-plus objects.
    test_set_deletion_protection_rest(request_type=dict)
def test_set_deletion_protection_rest_flattened(transport: str = "rest"):
    """Flattened keyword arguments are transcoded into the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response
        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }
        # Truthy values for each flattened field, overridden by the samples.
        mock_args = dict(
            project="project_value", zone="zone_value", resource="resource_value",
        )
        mock_args.update(sample_request)
        client.set_deletion_protection(**mock_args)
        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection"
            % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_set_deletion_protection_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.set_deletion_protection(
            compute.SetDeletionProtectionInstanceRequest(),
            project="project_value",
            zone="zone_value",
            resource="resource_value",
        )
def test_set_disk_auto_delete_rest(
    transport: str = "rest", request_type=compute.SetDiskAutoDeleteInstanceRequest
):
    """set_disk_auto_delete() decodes a 200 response into compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Every scalar field of the expected Operation, reused for asserts below.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    with mock.patch.object(Session, "request") as req:
        # Serve the canned Operation as a 200 JSON payload.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = canned
        response = client.set_disk_auto_delete(request)
    # The client must deserialize the payload back into an Operation whose
    # fields round-trip exactly.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_disk_auto_delete_rest_bad_request(
    transport: str = "rest", request_type=compute.SetDiskAutoDeleteInstanceRequest
):
    """An HTTP 400 from the transport surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose path params satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Stub the session to return a 400 and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        failed_response = Response()
        failed_response.status_code = 400
        failed_response.request = Request()
        req.return_value = failed_response
        client.set_disk_auto_delete(request)
def test_set_disk_auto_delete_rest_from_dict():
    # Re-run the happy-path test with a plain dict request to confirm the
    # client accepts dict-typed requests in addition to proto-plus objects.
    test_set_disk_auto_delete_rest(request_type=dict)
def test_set_disk_auto_delete_rest_flattened(transport: str = "rest"):
    """Flattened keyword arguments are transcoded into the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response
        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for each flattened field, overridden by the samples.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            auto_delete=True,
            device_name="device_name_value",
        )
        mock_args.update(sample_request)
        client.set_disk_auto_delete(**mock_args)
        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete"
            % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_set_disk_auto_delete_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.set_disk_auto_delete(
            compute.SetDiskAutoDeleteInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            auto_delete=True,
            device_name="device_name_value",
        )
def test_set_iam_policy_rest(
    transport: str = "rest", request_type=compute.SetIamPolicyInstanceRequest
):
    """set_iam_policy() decodes a successful REST response into compute.Policy."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request with path params that satisfy transcoding plus the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest(
        bindings=[compute.Binding(binding_id="binding_id_value")]
    )
    request = request_type(request_init)
    with mock.patch.object(Session, "request") as req:
        # Serve a canned Policy as a 200 JSON payload.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Policy.to_json(
            compute.Policy(etag="etag_value", iam_owned=True, version=774)
        ).encode("UTF-8")
        req.return_value = canned
        response = client.set_iam_policy(request)
    # Deserialized fields must round-trip exactly.
    assert isinstance(response, compute.Policy)
    assert response.etag == "etag_value"
    assert response.iam_owned is True
    assert response.version == 774
def test_set_iam_policy_rest_bad_request(
    transport: str = "rest", request_type=compute.SetIamPolicyInstanceRequest
):
    """An HTTP 400 from the transport surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request with path params that satisfy transcoding plus the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest(
        bindings=[compute.Binding(binding_id="binding_id_value")]
    )
    request = request_type(request_init)
    # Stub the session to return a 400 and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        failed_response = Response()
        failed_response.status_code = 400
        failed_response.request = Request()
        req.return_value = failed_response
        client.set_iam_policy(request)
def test_set_iam_policy_rest_from_dict():
    # Re-run the happy-path test with a plain dict request to confirm the
    # client accepts dict-typed requests in addition to proto-plus objects.
    test_set_iam_policy_rest(request_type=dict)
def test_set_iam_policy_rest_flattened(transport: str = "rest"):
    """Flattened kwargs (including the body resource) reach the right URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Policy.to_json(
            compute.Policy()
        ).encode("UTF-8")
        req.return_value = fake_response
        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }
        # Truthy values for each flattened field, overridden by the samples.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            resource="resource_value",
            zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(
                bindings=[compute.Binding(binding_id="binding_id_value")]
            ),
        )
        mock_args.update(sample_request)
        client.set_iam_policy(**mock_args)
        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy"
            % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_set_iam_policy_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # The client rejects mixing a request object with flattened kwargs.
    with pytest.raises(ValueError):
        client.set_iam_policy(
            compute.SetIamPolicyInstanceRequest(),
            project="project_value",
            zone="zone_value",
            resource="resource_value",
            zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(
                bindings=[compute.Binding(binding_id="binding_id_value")]
            ),
        )
def test_set_labels_rest(
    transport: str = "rest", request_type=compute.SetLabelsInstanceRequest
):
    """set_labels over REST must deserialize the server payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request fields chosen to satisfy URI transcoding.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_labels_request_resource": compute.InstancesSetLabelsRequest(
            label_fingerprint="label_fingerprint_value"
        ),
    }
    request = request_type(request_init)

    # Every scalar field the fake server populates on the Operation.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session and serve the canned Operation as JSON.
    with mock.patch.object(Session, "request") as req:
        canned = compute.Operation(**expected)
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value
        response = client.set_labels(request)

    # The client must surface every field of the mocked payload.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_labels_rest_bad_request(
    transport: str = "rest", request_type=compute.SetLabelsInstanceRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding so the HTTP layer is actually reached.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_labels_request_resource": compute.InstancesSetLabelsRequest(
            label_fingerprint="label_fingerprint_value"
        ),
    }
    request = request_type(request_init)

    # Fake a 400 response from the transport.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = response_value
        with pytest.raises(core_exceptions.BadRequest):
            client.set_labels(request)
def test_set_labels_rest_from_dict():
    """The set_labels round-trip test also accepts a plain dict request."""
    test_set_labels_rest(transport="rest", request_type=dict)
def test_set_labels_rest_flattened(transport: str = "rest"):
    """A flattened set_labels call must issue one request to the setLabels URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Serve a canned (empty) Operation so the client call completes.
        canned = compute.Operation()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value

        # Path parameters satisfying the HTTP rule, plus the flattened body field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_labels_request_resource": compute.InstancesSetLabelsRequest(
                label_fingerprint="label_fingerprint_value"
            ),
        }
        client.set_labels(**call_kwargs)

        # Exactly one HTTP call, and its URI matches the transcoding template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}"
            "/instances/{instance}/setLabels" % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_set_labels_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = {
        "project": "project_value",
        "zone": "zone_value",
        "instance": "instance_value",
        "instances_set_labels_request_resource": compute.InstancesSetLabelsRequest(
            label_fingerprint="label_fingerprint_value"
        ),
    }
    with pytest.raises(ValueError):
        client.set_labels(compute.SetLabelsInstanceRequest(), **flattened)
def test_set_machine_resources_rest(
    transport: str = "rest", request_type=compute.SetMachineResourcesInstanceRequest
):
    """set_machine_resources over REST must deserialize the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request fields chosen to satisfy URI transcoding.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_machine_resources_request_resource": compute.InstancesSetMachineResourcesRequest(
            guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]
        ),
    }
    request = request_type(request_init)

    # Every scalar field the fake server populates on the Operation.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session and serve the canned Operation as JSON.
    with mock.patch.object(Session, "request") as req:
        canned = compute.Operation(**expected)
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value
        response = client.set_machine_resources(request)

    # The client must surface every field of the mocked payload.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_machine_resources_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMachineResourcesInstanceRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding so the HTTP layer is actually reached.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_machine_resources_request_resource": compute.InstancesSetMachineResourcesRequest(
            guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]
        ),
    }
    request = request_type(request_init)

    # Fake a 400 response from the transport.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = response_value
        with pytest.raises(core_exceptions.BadRequest):
            client.set_machine_resources(request)
def test_set_machine_resources_rest_from_dict():
    """The set_machine_resources round-trip test also accepts a plain dict request."""
    test_set_machine_resources_rest(transport="rest", request_type=dict)
def test_set_machine_resources_rest_flattened(transport: str = "rest"):
    """A flattened call must issue one request to the setMachineResources URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Serve a canned (empty) Operation so the client call completes.
        canned = compute.Operation()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value

        # Path parameters satisfying the HTTP rule, plus the flattened body field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_machine_resources_request_resource": compute.InstancesSetMachineResourcesRequest(
                guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]
            ),
        }
        client.set_machine_resources(**call_kwargs)

        # Exactly one HTTP call, and its URI matches the transcoding template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}"
            "/instances/{instance}/setMachineResources" % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_set_machine_resources_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = {
        "project": "project_value",
        "zone": "zone_value",
        "instance": "instance_value",
        "instances_set_machine_resources_request_resource": compute.InstancesSetMachineResourcesRequest(
            guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]
        ),
    }
    with pytest.raises(ValueError):
        client.set_machine_resources(
            compute.SetMachineResourcesInstanceRequest(), **flattened
        )
def test_set_machine_type_rest(
    transport: str = "rest", request_type=compute.SetMachineTypeInstanceRequest
):
    """set_machine_type over REST must deserialize the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request fields chosen to satisfy URI transcoding.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_machine_type_request_resource": compute.InstancesSetMachineTypeRequest(
            machine_type="machine_type_value"
        ),
    }
    request = request_type(request_init)

    # Every scalar field the fake server populates on the Operation.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session and serve the canned Operation as JSON.
    with mock.patch.object(Session, "request") as req:
        canned = compute.Operation(**expected)
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value
        response = client.set_machine_type(request)

    # The client must surface every field of the mocked payload.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_machine_type_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMachineTypeInstanceRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding so the HTTP layer is actually reached.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_machine_type_request_resource": compute.InstancesSetMachineTypeRequest(
            machine_type="machine_type_value"
        ),
    }
    request = request_type(request_init)

    # Fake a 400 response from the transport.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = response_value
        with pytest.raises(core_exceptions.BadRequest):
            client.set_machine_type(request)
def test_set_machine_type_rest_from_dict():
    """The set_machine_type round-trip test also accepts a plain dict request."""
    test_set_machine_type_rest(transport="rest", request_type=dict)
def test_set_machine_type_rest_flattened(transport: str = "rest"):
    """A flattened call must issue one request to the setMachineType URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Serve a canned (empty) Operation so the client call completes.
        canned = compute.Operation()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value

        # Path parameters satisfying the HTTP rule, plus the flattened body field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_machine_type_request_resource": compute.InstancesSetMachineTypeRequest(
                machine_type="machine_type_value"
            ),
        }
        client.set_machine_type(**call_kwargs)

        # Exactly one HTTP call, and its URI matches the transcoding template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}"
            "/instances/{instance}/setMachineType" % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_set_machine_type_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = {
        "project": "project_value",
        "zone": "zone_value",
        "instance": "instance_value",
        "instances_set_machine_type_request_resource": compute.InstancesSetMachineTypeRequest(
            machine_type="machine_type_value"
        ),
    }
    with pytest.raises(ValueError):
        client.set_machine_type(compute.SetMachineTypeInstanceRequest(), **flattened)
def test_set_metadata_rest(
    transport: str = "rest", request_type=compute.SetMetadataInstanceRequest
):
    """set_metadata over REST must deserialize the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request fields chosen to satisfy URI transcoding.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "metadata_resource": compute.Metadata(fingerprint="fingerprint_value"),
    }
    request = request_type(request_init)

    # Every scalar field the fake server populates on the Operation.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session and serve the canned Operation as JSON.
    with mock.patch.object(Session, "request") as req:
        canned = compute.Operation(**expected)
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value
        response = client.set_metadata(request)

    # The client must surface every field of the mocked payload.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_metadata_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMetadataInstanceRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding so the HTTP layer is actually reached.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "metadata_resource": compute.Metadata(fingerprint="fingerprint_value"),
    }
    request = request_type(request_init)

    # Fake a 400 response from the transport.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = response_value
        with pytest.raises(core_exceptions.BadRequest):
            client.set_metadata(request)
def test_set_metadata_rest_from_dict():
    """The set_metadata round-trip test also accepts a plain dict request."""
    test_set_metadata_rest(transport="rest", request_type=dict)
def test_set_metadata_rest_flattened(transport: str = "rest"):
    """A flattened call must issue one request to the setMetadata URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Serve a canned (empty) Operation so the client call completes.
        canned = compute.Operation()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value

        # Path parameters satisfying the HTTP rule, plus the flattened body field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "metadata_resource": compute.Metadata(fingerprint="fingerprint_value"),
        }
        client.set_metadata(**call_kwargs)

        # Exactly one HTTP call, and its URI matches the transcoding template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}"
            "/instances/{instance}/setMetadata" % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_set_metadata_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = {
        "project": "project_value",
        "zone": "zone_value",
        "instance": "instance_value",
        "metadata_resource": compute.Metadata(fingerprint="fingerprint_value"),
    }
    with pytest.raises(ValueError):
        client.set_metadata(compute.SetMetadataInstanceRequest(), **flattened)
def test_set_min_cpu_platform_rest(
    transport: str = "rest", request_type=compute.SetMinCpuPlatformInstanceRequest
):
    """set_min_cpu_platform over REST must deserialize the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request fields chosen to satisfy URI transcoding.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_min_cpu_platform_request_resource": compute.InstancesSetMinCpuPlatformRequest(
            min_cpu_platform="min_cpu_platform_value"
        ),
    }
    request = request_type(request_init)

    # Every scalar field the fake server populates on the Operation.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session and serve the canned Operation as JSON.
    with mock.patch.object(Session, "request") as req:
        canned = compute.Operation(**expected)
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value
        response = client.set_min_cpu_platform(request)

    # The client must surface every field of the mocked payload.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_min_cpu_platform_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMinCpuPlatformInstanceRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding so the HTTP layer is actually reached.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_min_cpu_platform_request_resource": compute.InstancesSetMinCpuPlatformRequest(
            min_cpu_platform="min_cpu_platform_value"
        ),
    }
    request = request_type(request_init)

    # Fake a 400 response from the transport.
    response_value = Response()
    response_value.status_code = 400
    response_value.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = response_value
        with pytest.raises(core_exceptions.BadRequest):
            client.set_min_cpu_platform(request)
def test_set_min_cpu_platform_rest_from_dict():
    """The set_min_cpu_platform round-trip test also accepts a plain dict request."""
    test_set_min_cpu_platform_rest(transport="rest", request_type=dict)
def test_set_min_cpu_platform_rest_flattened(transport: str = "rest"):
    """A flattened call must issue one request to the setMinCpuPlatform URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Serve a canned (empty) Operation so the client call completes.
        canned = compute.Operation()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value

        # Path parameters satisfying the HTTP rule, plus the flattened body field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_min_cpu_platform_request_resource": compute.InstancesSetMinCpuPlatformRequest(
                min_cpu_platform="min_cpu_platform_value"
            ),
        }
        client.set_min_cpu_platform(**call_kwargs)

        # Exactly one HTTP call, and its URI matches the transcoding template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}"
            "/instances/{instance}/setMinCpuPlatform" % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_set_min_cpu_platform_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = {
        "project": "project_value",
        "zone": "zone_value",
        "instance": "instance_value",
        "instances_set_min_cpu_platform_request_resource": compute.InstancesSetMinCpuPlatformRequest(
            min_cpu_platform="min_cpu_platform_value"
        ),
    }
    with pytest.raises(ValueError):
        client.set_min_cpu_platform(
            compute.SetMinCpuPlatformInstanceRequest(), **flattened
        )
def test_set_scheduling_rest(
    transport: str = "rest", request_type=compute.SetSchedulingInstanceRequest
):
    """set_scheduling over REST must deserialize the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request fields chosen to satisfy URI transcoding.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "scheduling_resource": compute.Scheduling(automatic_restart=True),
    }
    request = request_type(request_init)

    # Every scalar field the fake server populates on the Operation.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session and serve the canned Operation as JSON.
    with mock.patch.object(Session, "request") as req:
        canned = compute.Operation(**expected)
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = response_value
        response = client.set_scheduling(request)

    # The client must surface every field of the mocked payload.
    assert isinstance(response, compute.Operation)
    for field, value in expected.items():
        assert getattr(response, field) == value
def test_set_scheduling_rest_bad_request(
    transport: str = "rest", request_type=compute.SetSchedulingInstanceRequest
):
    """An HTTP 400 from set_scheduling must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that satisfies URI transcoding, so the only failure
    # mode exercised is the HTTP status code.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["scheduling_resource"] = compute.Scheduling(automatic_restart=True)
    request = request_type(request_init)

    # Fake a 400 response and verify the client raises BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.set_scheduling(request)
def test_set_scheduling_rest_from_dict():
    """The happy-path set_scheduling test must also accept a plain dict request."""
    test_set_scheduling_rest(request_type=dict)
def test_set_scheduling_rest_flattened(transport: str = "rest"):
    """Flattened set_scheduling arguments must be merged into the transcoded URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        # Serve back an empty-but-valid Operation payload.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on update.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            scheduling_resource=compute.Scheduling(automatic_restart=True),
        )
        mock_args.update(sample_request)
        client.set_scheduling(**mock_args)

        # Exactly one HTTP call, and its URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling"
            % client.transport._host,
            args[1],
        )
def test_set_scheduling_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mixing a request message with flattened fields is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        client.set_scheduling(
            compute.SetSchedulingInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            scheduling_resource=compute.Scheduling(automatic_restart=True),
        )
def test_set_service_account_rest(
    transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest
):
    """set_service_account over REST should deserialize the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request whose path params satisfy the URI transcoding rules.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_set_service_account_request_resource"] = compute.InstancesSetServiceAccountRequest(
        email="email_value"
    )
    request = request_type(request_init)

    # Every scalar field expected to round-trip through the JSON payload.
    expected_fields = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.set_service_account(request)

    # The returned Operation must carry every field from the fake payload.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_service_account_rest_bad_request(
    transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest
):
    """An HTTP 400 from set_service_account must surface as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding; the failure is the HTTP status.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_set_service_account_request_resource"] = compute.InstancesSetServiceAccountRequest(
        email="email_value"
    )
    request = request_type(request_init)

    # Fake a 400 response and verify the client raises BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.set_service_account(request)
def test_set_service_account_rest_from_dict():
    """The happy-path set_service_account test must also accept a plain dict."""
    test_set_service_account_rest(request_type=dict)
def test_set_service_account_rest_flattened(transport: str = "rest"):
    """Flattened set_service_account arguments must reach the transcoded URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        # Serve back an empty-but-valid Operation payload.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on update.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(
                email="email_value"
            ),
        )
        mock_args.update(sample_request)
        client.set_service_account(**mock_args)

        # Exactly one HTTP call, and its URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount"
            % client.transport._host,
            args[1],
        )
def test_set_service_account_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mixing a request message with flattened fields is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        client.set_service_account(
            compute.SetServiceAccountInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(
                email="email_value"
            ),
        )
def test_set_shielded_instance_integrity_policy_rest(
    transport: str = "rest",
    request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest,
):
    """set_shielded_instance_integrity_policy over REST returns a populated Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request whose path params satisfy the URI transcoding rules.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["shielded_instance_integrity_policy_resource"] = compute.ShieldedInstanceIntegrityPolicy(
        update_auto_learn_policy=True
    )
    request = request_type(request_init)

    # Every scalar field expected to round-trip through the JSON payload.
    expected_fields = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.set_shielded_instance_integrity_policy(request)

    # The returned Operation must carry every field from the fake payload.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_shielded_instance_integrity_policy_rest_bad_request(
    transport: str = "rest",
    request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest,
):
    """An HTTP 400 from set_shielded_instance_integrity_policy must raise BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding; the failure is the HTTP status.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["shielded_instance_integrity_policy_resource"] = compute.ShieldedInstanceIntegrityPolicy(
        update_auto_learn_policy=True
    )
    request = request_type(request_init)

    # Fake a 400 response and verify the client raises BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.set_shielded_instance_integrity_policy(request)
def test_set_shielded_instance_integrity_policy_rest_from_dict():
    """The happy-path integrity-policy test must also accept a plain dict."""
    test_set_shielded_instance_integrity_policy_rest(request_type=dict)
def test_set_shielded_instance_integrity_policy_rest_flattened(transport: str = "rest"):
    """Flattened integrity-policy arguments must reach the transcoded URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        # Serve back an empty-but-valid Operation payload.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on update.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(
                update_auto_learn_policy=True
            ),
        )
        mock_args.update(sample_request)
        client.set_shielded_instance_integrity_policy(**mock_args)

        # Exactly one HTTP call, and its URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy"
            % client.transport._host,
            args[1],
        )
def test_set_shielded_instance_integrity_policy_rest_flattened_error(
    transport: str = "rest",
):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mixing a request message with flattened fields is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        client.set_shielded_instance_integrity_policy(
            compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(
                update_auto_learn_policy=True
            ),
        )
def test_set_tags_rest(
    transport: str = "rest", request_type=compute.SetTagsInstanceRequest
):
    """set_tags over REST should deserialize the wire payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request whose path params satisfy the URI transcoding rules.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["tags_resource"] = compute.Tags(fingerprint="fingerprint_value")
    request = request_type(request_init)

    # Every scalar field expected to round-trip through the JSON payload.
    expected_fields = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.set_tags(request)

    # The returned Operation must carry every field from the fake payload.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_tags_rest_bad_request(
    transport: str = "rest", request_type=compute.SetTagsInstanceRequest
):
    """An HTTP 400 from set_tags must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding; the failure is the HTTP status.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["tags_resource"] = compute.Tags(fingerprint="fingerprint_value")
    request = request_type(request_init)

    # Fake a 400 response and verify the client raises BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.set_tags(request)
def test_set_tags_rest_from_dict():
    """The happy-path set_tags test must also accept a plain dict request."""
    test_set_tags_rest(request_type=dict)
def test_set_tags_rest_flattened(transport: str = "rest"):
    """Flattened set_tags arguments must be merged into the transcoded URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        # Serve back an empty-but-valid Operation payload.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on update.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            tags_resource=compute.Tags(fingerprint="fingerprint_value"),
        )
        mock_args.update(sample_request)
        client.set_tags(**mock_args)

        # Exactly one HTTP call, and its URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags"
            % client.transport._host,
            args[1],
        )
def test_set_tags_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mixing a request message with flattened fields is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        client.set_tags(
            compute.SetTagsInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            tags_resource=compute.Tags(fingerprint="fingerprint_value"),
        )
def test_simulate_maintenance_event_rest(
    transport: str = "rest",
    request_type=compute.SimulateMaintenanceEventInstanceRequest,
):
    """simulate_maintenance_event over REST returns a fully populated Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request whose path params satisfy the URI transcoding rules.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Every scalar field expected to round-trip through the JSON payload.
    expected_fields = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.simulate_maintenance_event(request)

    # The returned Operation must carry every field from the fake payload.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_simulate_maintenance_event_rest_bad_request(
    transport: str = "rest",
    request_type=compute.SimulateMaintenanceEventInstanceRequest,
):
    """An HTTP 400 from simulate_maintenance_event must raise BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding; the failure is the HTTP status.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Fake a 400 response and verify the client raises BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.simulate_maintenance_event(request)
def test_simulate_maintenance_event_rest_from_dict():
    """The happy-path simulate_maintenance_event test must also accept a dict."""
    test_simulate_maintenance_event_rest(request_type=dict)
def test_simulate_maintenance_event_rest_flattened(transport: str = "rest"):
    """Flattened simulate_maintenance_event arguments must reach the URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        # Serve back an empty-but-valid Operation payload.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on update.
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.simulate_maintenance_event(**mock_args)

        # Exactly one HTTP call, and its URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent"
            % client.transport._host,
            args[1],
        )
def test_simulate_maintenance_event_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mixing a request message with flattened fields is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        client.simulate_maintenance_event(
            compute.SimulateMaintenanceEventInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_start_rest(transport: str = "rest", request_type=compute.StartInstanceRequest):
    """start over REST should deserialize the wire payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request whose path params satisfy the URI transcoding rules.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Every scalar field expected to round-trip through the JSON payload.
    expected_fields = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.start(request)

    # The returned Operation must carry every field from the fake payload.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_start_rest_bad_request(
    transport: str = "rest", request_type=compute.StartInstanceRequest
):
    """An HTTP 400 from start must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding; the failure is the HTTP status.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Fake a 400 response and verify the client raises BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        fake_response = Response()
        fake_response.status_code = 400
        fake_response.request = Request()
        req.return_value = fake_response
        client.start(request)
def test_start_rest_from_dict():
    """The happy-path start test must also accept a plain dict request."""
    test_start_rest(request_type=dict)
def test_start_rest_flattened(transport: str = "rest"):
    """Flattened start arguments must be merged into the transcoded URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Patch the HTTP session so no real network call is made.
    with mock.patch.object(Session, "request") as req:
        # Serve back an empty-but-valid Operation payload.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Path params that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy values for every flattened field; path params win on update.
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.start(**mock_args)

        # Exactly one HTTP call, and its URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start"
            % client.transport._host,
            args[1],
        )
def test_start_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mixing a request message with flattened fields is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        client.start(
            compute.StartInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_start_with_encryption_key_rest(
    transport: str = "rest", request_type=compute.StartWithEncryptionKeyInstanceRequest
):
    """start_with_encryption_key() must deserialize a full Operation payload."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init[
        "instances_start_with_encryption_key_request_resource"
    ] = compute.InstancesStartWithEncryptionKeyRequest(
        disks=[
            compute.CustomerEncryptionKeyProtectedDisk(
                disk_encryption_key=compute.CustomerEncryptionKey(
                    kms_key_name="kms_key_name_value"
                )
            )
        ]
    )
    request = request_type(init)
    # One truthy value per Operation field; reused below for the assertions.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    # Patch the HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.start_with_encryption_key(request)
    # The client must return a compute.Operation carrying every field back.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_start_with_encryption_key_rest_bad_request(
    transport: str = "rest", request_type=compute.StartWithEncryptionKeyInstanceRequest
):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init[
        "instances_start_with_encryption_key_request_resource"
    ] = compute.InstancesStartWithEncryptionKeyRequest(
        disks=[
            compute.CustomerEncryptionKeyProtectedDisk(
                disk_encryption_key=compute.CustomerEncryptionKey(
                    kms_key_name="kms_key_name_value"
                )
            )
        ]
    )
    request = request_type(init)
    # Hand back a 400 response and expect the mapped exception type.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.start_with_encryption_key(request)
def test_start_with_encryption_key_rest_from_dict():
    """The happy-path test must also accept a plain dict request."""
    test_start_with_encryption_key_rest(transport="rest", request_type=dict)
def test_start_with_encryption_key_rest_flattened(transport: str = "rest"):
    """Flattened arguments must expand into the startWithEncryptionKey URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the HTTP session so the call returns a canned empty Operation.
    with mock.patch.object(Session, "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = ok_response
        # Truthy flattened fields, then overwritten with transcoding-safe values.
        flattened = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "instances_start_with_encryption_key_request_resource": compute.InstancesStartWithEncryptionKeyRequest(
                disks=[
                    compute.CustomerEncryptionKeyProtectedDisk(
                        disk_encryption_key=compute.CustomerEncryptionKey(
                            kms_key_name="kms_key_name_value"
                        )
                    )
                ]
            ),
        }
        flattened.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.start_with_encryption_key(**flattened)
    # Exactly one HTTP call was made, against the expected path template.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey"
        % client.transport._host,
        call_args[1],
    )
def test_start_with_encryption_key_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(
            disks=[
                compute.CustomerEncryptionKeyProtectedDisk(
                    disk_encryption_key=compute.CustomerEncryptionKey(
                        kms_key_name="kms_key_name_value"
                    )
                )
            ]
        ),
    )
    with pytest.raises(ValueError):
        client.start_with_encryption_key(
            compute.StartWithEncryptionKeyInstanceRequest(), **flattened
        )
def test_stop_rest(transport: str = "rest", request_type=compute.StopInstanceRequest):
    """stop() must deserialize a fully-populated Operation over REST."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # One truthy value per Operation field; reused below for the assertions.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    # Patch the HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.stop(request)
    # The client must return a compute.Operation carrying every field back.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_stop_rest_bad_request(
    transport: str = "rest", request_type=compute.StopInstanceRequest
):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Hand back a 400 response and expect the mapped exception type.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.stop(request)
def test_stop_rest_from_dict():
    """The stop() happy-path test must also accept a plain dict request."""
    test_stop_rest(transport="rest", request_type=dict)
def test_stop_rest_flattened(transport: str = "rest"):
    """Flattened keyword arguments to stop() must expand into the REST URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the HTTP session so the call returns a canned empty Operation.
    with mock.patch.object(Session, "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = ok_response
        # Truthy flattened fields, then overwritten with transcoding-safe values.
        flattened = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
        }
        flattened.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.stop(**flattened)
    # Exactly one HTTP call was made, against the expected path template.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop"
        % client.transport._host,
        call_args[1],
    )
def test_stop_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value", zone="zone_value", instance="instance_value",
    )
    with pytest.raises(ValueError):
        client.stop(compute.StopInstanceRequest(), **flattened)
def test_test_iam_permissions_rest(
    transport: str = "rest", request_type=compute.TestIamPermissionsInstanceRequest
):
    """test_iam_permissions() must round-trip a TestPermissionsResponse."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    init["test_permissions_request_resource"] = compute.TestPermissionsRequest(
        permissions=["permissions_value"]
    )
    request = request_type(init)
    # Patch the HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.TestPermissionsResponse.to_json(
            compute.TestPermissionsResponse(permissions=["permissions_value"],)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.test_iam_permissions(request)
    # The client must return the parsed response with its permissions intact.
    assert isinstance(response, compute.TestPermissionsResponse)
    assert response.permissions == ["permissions_value"]
def test_test_iam_permissions_rest_bad_request(
    transport: str = "rest", request_type=compute.TestIamPermissionsInstanceRequest
):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    init["test_permissions_request_resource"] = compute.TestPermissionsRequest(
        permissions=["permissions_value"]
    )
    request = request_type(init)
    # Hand back a 400 response and expect the mapped exception type.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.test_iam_permissions(request)
def test_test_iam_permissions_rest_from_dict():
    """The happy-path test must also accept a plain dict request."""
    test_test_iam_permissions_rest(transport="rest", request_type=dict)
def test_test_iam_permissions_rest_flattened(transport: str = "rest"):
    """Flattened arguments must expand into the testIamPermissions URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the HTTP session so the call returns a canned empty response.
    with mock.patch.object(Session, "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.TestPermissionsResponse.to_json(
            compute.TestPermissionsResponse()
        ).encode("UTF-8")
        req.return_value = ok_response
        # Truthy flattened fields, then overwritten with transcoding-safe values.
        flattened = {
            "project": "project_value",
            "zone": "zone_value",
            "resource": "resource_value",
            "test_permissions_request_resource": compute.TestPermissionsRequest(
                permissions=["permissions_value"]
            ),
        }
        flattened.update(
            {"project": "sample1", "zone": "sample2", "resource": "sample3"}
        )
        client.test_iam_permissions(**flattened)
    # Exactly one HTTP call was made, against the expected path template.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions"
        % client.transport._host,
        call_args[1],
    )
def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        resource="resource_value",
        test_permissions_request_resource=compute.TestPermissionsRequest(
            permissions=["permissions_value"]
        ),
    )
    with pytest.raises(ValueError):
        client.test_iam_permissions(
            compute.TestIamPermissionsInstanceRequest(), **flattened
        )
def test_update_rest(
    transport: str = "rest", request_type=compute.UpdateInstanceRequest
):
    """update() must deserialize a fully-populated Operation over REST."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init["instance_resource"] = compute.Instance(
        advanced_machine_features=compute.AdvancedMachineFeatures(
            enable_nested_virtualization=True
        )
    )
    request = request_type(init)
    # One truthy value per Operation field; reused below for the assertions.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    # Patch the HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.update(request)
    # The client must return a compute.Operation carrying every field back.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_update_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateInstanceRequest
):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init["instance_resource"] = compute.Instance(
        advanced_machine_features=compute.AdvancedMachineFeatures(
            enable_nested_virtualization=True
        )
    )
    request = request_type(init)
    # Hand back a 400 response and expect the mapped exception type.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.update(request)
def test_update_rest_from_dict():
    """The update() happy-path test must also accept a plain dict request."""
    test_update_rest(transport="rest", request_type=dict)
def test_update_rest_flattened(transport: str = "rest"):
    """Flattened keyword arguments to update() must expand into the REST URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the HTTP session so the call returns a canned empty Operation.
    with mock.patch.object(Session, "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = ok_response
        # Truthy flattened fields, then overwritten with transcoding-safe values.
        flattened = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "instance_resource": compute.Instance(
                advanced_machine_features=compute.AdvancedMachineFeatures(
                    enable_nested_virtualization=True
                )
            ),
        }
        flattened.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.update(**flattened)
    # Exactly one HTTP call was made, against the expected path template.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}"
        % client.transport._host,
        call_args[1],
    )
def test_update_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instance_resource=compute.Instance(
            advanced_machine_features=compute.AdvancedMachineFeatures(
                enable_nested_virtualization=True
            )
        ),
    )
    with pytest.raises(ValueError):
        client.update(compute.UpdateInstanceRequest(), **flattened)
def test_update_access_config_rest(
    transport: str = "rest", request_type=compute.UpdateAccessConfigInstanceRequest
):
    """update_access_config() must deserialize a full Operation payload."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init["access_config_resource"] = compute.AccessConfig(
        external_ipv6="external_ipv6_value"
    )
    request = request_type(init)
    # One truthy value per Operation field; reused below for the assertions.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    # Patch the HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.update_access_config(request)
    # The client must return a compute.Operation carrying every field back.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_update_access_config_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateAccessConfigInstanceRequest
):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init["access_config_resource"] = compute.AccessConfig(
        external_ipv6="external_ipv6_value"
    )
    request = request_type(init)
    # Hand back a 400 response and expect the mapped exception type.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.update_access_config(request)
def test_update_access_config_rest_from_dict():
    """The happy-path test must also accept a plain dict request."""
    test_update_access_config_rest(transport="rest", request_type=dict)
def test_update_access_config_rest_flattened(transport: str = "rest"):
    """Flattened arguments must expand into the updateAccessConfig URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the HTTP session so the call returns a canned empty Operation.
    with mock.patch.object(Session, "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = ok_response
        # Truthy flattened fields, then overwritten with transcoding-safe values.
        flattened = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "network_interface": "network_interface_value",
            "access_config_resource": compute.AccessConfig(
                external_ipv6="external_ipv6_value"
            ),
        }
        flattened.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.update_access_config(**flattened)
    # Exactly one HTTP call was made, against the expected path template.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig"
        % client.transport._host,
        call_args[1],
    )
def test_update_access_config_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
        access_config_resource=compute.AccessConfig(
            external_ipv6="external_ipv6_value"
        ),
    )
    with pytest.raises(ValueError):
        client.update_access_config(
            compute.UpdateAccessConfigInstanceRequest(), **flattened
        )
def test_update_display_device_rest(
    transport: str = "rest", request_type=compute.UpdateDisplayDeviceInstanceRequest
):
    """update_display_device() must deserialize a full Operation payload."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init["display_device_resource"] = compute.DisplayDevice(enable_display=True)
    request = request_type(init)
    # One truthy value per Operation field; reused below for the assertions.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )
    # Patch the HTTP session and hand back a canned 200 response.
    with mock.patch.object(Session, "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.update_display_device(request)
    # The client must return a compute.Operation carrying every field back.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_update_display_device_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateDisplayDeviceInstanceRequest
):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request whose fields satisfy the HTTP transcoding rules, plus the body resource.
    init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    init["display_device_resource"] = compute.DisplayDevice(enable_display=True)
    request = request_type(init)
    # Hand back a 400 response and expect the mapped exception type.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.update_display_device(request)
def test_update_display_device_rest_from_dict():
    """The happy-path test must also accept a plain dict request."""
    test_update_display_device_rest(transport="rest", request_type=dict)
def test_update_display_device_rest_flattened(transport: str = "rest"):
    """Flattened arguments must expand into the updateDisplayDevice URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Patch the HTTP session so the call returns a canned empty Operation.
    with mock.patch.object(Session, "request") as req:
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = ok_response
        # Truthy flattened fields, then overwritten with transcoding-safe values.
        flattened = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "display_device_resource": compute.DisplayDevice(enable_display=True),
        }
        flattened.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.update_display_device(**flattened)
    # Exactly one HTTP call was made, against the expected path template.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice"
        % client.transport._host,
        call_args[1],
    )
def test_update_display_device_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with pytest.raises(ValueError):
        client.update_display_device(
            compute.UpdateDisplayDeviceInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            display_device_resource=compute.DisplayDevice(enable_display=True),
        )
def test_update_network_interface_rest(
    transport: str = "rest", request_type=compute.UpdateNetworkInterfaceInstanceRequest
):
    """updateNetworkInterface over REST: a 200 Operation payload must round-trip.

    Mocks the HTTP session, returns a fully-populated compute.Operation as
    JSON, and checks that every field survives deserialization unchanged.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["network_interface_resource"] = compute.NetworkInterface(
        access_configs=[compute.AccessConfig(external_ipv6="external_ipv6_value")]
    )
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_network_interface(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    # Each assertion below mirrors one field set on return_value above.
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_network_interface_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateNetworkInterfaceInstanceRequest
):
    """An HTTP 400 reply must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request dict that satisfies URL transcoding for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "network_interface_resource": compute.NetworkInterface(
                access_configs=[
                    compute.AccessConfig(external_ipv6="external_ipv6_value")
                ]
            ),
        }
    )
    # Force the mocked HTTP session to answer with a 400.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.update_network_interface(request)
def test_update_network_interface_rest_from_dict():
    """The updateNetworkInterface RPC must also accept a plain-dict request."""
    test_update_network_interface_rest(request_type=dict)
def test_update_network_interface_rest_flattened(transport: str = "rest"):
    """Flattened kwargs must be transcoded onto the updateNetworkInterface URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Fake a minimal successful Operation coming back over HTTP.
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = ok_response
        # Path params that satisfy the http rule, with a truthy value for
        # every remaining flattened field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "network_interface": "network_interface_value",
            "network_interface_resource": compute.NetworkInterface(
                access_configs=[
                    compute.AccessConfig(external_ipv6="external_ipv6_value")
                ]
            ),
        }
        client.update_network_interface(**call_kwargs)
        # Exactly one HTTP call, aimed at the expected URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface"
            % client.transport._host,
            args[1],
        )
def test_update_network_interface_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with pytest.raises(ValueError):
        client.update_network_interface(
            compute.UpdateNetworkInterfaceInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            network_interface="network_interface_value",
            network_interface_resource=compute.NetworkInterface(
                access_configs=[
                    compute.AccessConfig(external_ipv6="external_ipv6_value")
                ]
            ),
        )
def test_update_shielded_instance_config_rest(
    transport: str = "rest",
    request_type=compute.UpdateShieldedInstanceConfigInstanceRequest,
):
    """updateShieldedInstanceConfig over REST: a 200 Operation payload must round-trip.

    Mocks the HTTP session, returns a fully-populated compute.Operation as
    JSON, and checks that every field survives deserialization unchanged.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["shielded_instance_config_resource"] = compute.ShieldedInstanceConfig(
        enable_integrity_monitoring=True
    )
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_shielded_instance_config(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    # Each assertion below mirrors one field set on return_value above.
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_shielded_instance_config_rest_bad_request(
    transport: str = "rest",
    request_type=compute.UpdateShieldedInstanceConfigInstanceRequest,
):
    """An HTTP 400 reply must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request dict that satisfies URL transcoding for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "shielded_instance_config_resource": compute.ShieldedInstanceConfig(
                enable_integrity_monitoring=True
            ),
        }
    )
    # Force the mocked HTTP session to answer with a 400.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.update_shielded_instance_config(request)
def test_update_shielded_instance_config_rest_from_dict():
    """The updateShieldedInstanceConfig RPC must also accept a plain-dict request."""
    test_update_shielded_instance_config_rest(request_type=dict)
def test_update_shielded_instance_config_rest_flattened(transport: str = "rest"):
    """Flattened kwargs must be transcoded onto the updateShieldedInstanceConfig URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with mock.patch.object(Session, "request") as req:
        # Fake a minimal successful Operation coming back over HTTP.
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = ok_response
        # Path params that satisfy the http rule, with a truthy value for
        # every remaining flattened field.
        call_kwargs = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "shielded_instance_config_resource": compute.ShieldedInstanceConfig(
                enable_integrity_monitoring=True
            ),
        }
        client.update_shielded_instance_config(**call_kwargs)
        # Exactly one HTTP call, aimed at the expected URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig"
            % client.transport._host,
            args[1],
        )
def test_update_shielded_instance_config_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    with pytest.raises(ValueError):
        client.update_shielded_instance_config(
            compute.UpdateShieldedInstanceConfigInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            shielded_instance_config_resource=compute.ShieldedInstanceConfig(
                enable_integrity_monitoring=True
            ),
        )
def test_credentials_transport_error():
    """A transport instance cannot be combined with credentials, a credentials file, or scopes.

    Fix: the original bound each constructor result to an unused ``client``
    variable inside ``pytest.raises`` — the constructor raises, so the binding
    never completed and was dead code (flake8 F841). The bindings are removed.
    """
    # It is an error to provide credentials and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        InstancesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        InstancesClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        InstancesClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client accepts a pre-built transport and exposes it unchanged."""
    rest_transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert InstancesClient(transport=rest_transport).transport is rest_transport
@pytest.mark.parametrize("transport_class", [transports.InstancesRestTransport,])
def test_transport_adc(transport_class):
    """Constructing a transport without credentials must fall back to ADC."""
    with mock.patch.object(google.auth, "default") as default_auth:
        default_auth.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        default_auth.assert_called_once()
def test_instances_base_transport_error():
    """Passing both a credentials object and credentials_file raises DuplicateCredentialArgs.

    Fix: the original bound the constructor result to an unused ``transport``
    variable inside ``pytest.raises`` — the constructor raises, so the binding
    never completed and was dead code (flake8 F841). The binding is removed.
    """
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.InstancesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_instances_base_transport():
    """Every RPC method on the abstract base transport raises NotImplementedError.

    The base __init__ is patched out so the abstract transport can be
    instantiated without real credentials plumbing.
    """
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.compute_v1.services.instances.transports.InstancesTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.InstancesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "add_access_config",
        "add_resource_policies",
        "aggregated_list",
        "attach_disk",
        "bulk_insert",
        "delete",
        "delete_access_config",
        "detach_disk",
        "get",
        "get_effective_firewalls",
        "get_guest_attributes",
        "get_iam_policy",
        "get_screenshot",
        "get_serial_port_output",
        "get_shielded_instance_identity",
        "insert",
        "list",
        "list_referrers",
        "remove_resource_policies",
        "reset",
        "send_diagnostic_interrupt",
        "set_deletion_protection",
        "set_disk_auto_delete",
        "set_iam_policy",
        "set_labels",
        "set_machine_resources",
        "set_machine_type",
        "set_metadata",
        "set_min_cpu_platform",
        "set_scheduling",
        "set_service_account",
        "set_shielded_instance_integrity_policy",
        "set_tags",
        "simulate_maintenance_event",
        "start",
        "start_with_encryption_key",
        "stop",
        "test_iam_permissions",
        "update",
        "update_access_config",
        "update_display_device",
        "update_network_interface",
        "update_shielded_instance_config",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    # close() is likewise abstract on the base transport.
    with pytest.raises(NotImplementedError):
        transport.close()
def test_instances_base_transport_with_credentials_file():
    """A credentials_file must be loaded with the compute default scopes."""
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages"
    ) as prep:
        prep.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.InstancesTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
def test_instances_base_transport_with_adc():
    """With neither credentials nor credentials_file, the transport pulls ADC."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages"
    ) as prep:
        prep.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.InstancesTransport()
        adc.assert_called_once()
def test_instances_auth_adc():
    """Client construction without credentials must call ADC with compute scopes."""
    with mock.patch.object(google.auth, "default", autospec=True) as default_auth:
        default_auth.return_value = (ga_credentials.AnonymousCredentials(), None)
        InstancesClient()
        default_auth.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id=None,
        )
def test_instances_http_transport_client_cert_source_for_mtls():
    """client_cert_source_for_mtls must be forwarded to configure_mtls_channel."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch(
        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
    ) as configure_mtls:
        transports.InstancesRestTransport(
            credentials=creds, client_cert_source_for_mtls=client_cert_source_callback
        )
        configure_mtls.assert_called_once_with(client_cert_source_callback)
def test_instances_host_no_port():
    """A port-less api_endpoint resolves to the default :443 host."""
    opts = client_options.ClientOptions(api_endpoint="compute.googleapis.com")
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=opts,
    )
    assert client.transport._host == "compute.googleapis.com:443"
def test_instances_host_with_port():
    """An api_endpoint with an explicit port keeps that port."""
    opts = client_options.ClientOptions(api_endpoint="compute.googleapis.com:8000")
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=opts,
    )
    assert client.transport._host == "compute.googleapis.com:8000"
def test_common_billing_account_path():
    """common_billing_account_path renders billingAccounts/{billing_account}."""
    actual = InstancesClient.common_billing_account_path("squid")
    assert actual == "billingAccounts/squid"
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path must invert common_billing_account_path."""
    expected = {"billing_account": "clam"}
    path = InstancesClient.common_billing_account_path(**expected)
    assert InstancesClient.parse_common_billing_account_path(path) == expected
def test_common_folder_path():
    """common_folder_path renders folders/{folder}."""
    assert InstancesClient.common_folder_path("whelk") == "folders/whelk"
def test_parse_common_folder_path():
    """parse_common_folder_path must invert common_folder_path."""
    expected = {"folder": "octopus"}
    path = InstancesClient.common_folder_path(**expected)
    assert InstancesClient.parse_common_folder_path(path) == expected
def test_common_organization_path():
    """common_organization_path renders organizations/{organization}."""
    assert InstancesClient.common_organization_path("oyster") == "organizations/oyster"
def test_parse_common_organization_path():
    """parse_common_organization_path must invert common_organization_path."""
    expected = {"organization": "nudibranch"}
    path = InstancesClient.common_organization_path(**expected)
    assert InstancesClient.parse_common_organization_path(path) == expected
def test_common_project_path():
    """common_project_path renders projects/{project}."""
    assert InstancesClient.common_project_path("cuttlefish") == "projects/cuttlefish"
def test_parse_common_project_path():
    """parse_common_project_path must invert common_project_path."""
    expected = {"project": "mussel"}
    path = InstancesClient.common_project_path(**expected)
    assert InstancesClient.parse_common_project_path(path) == expected
def test_common_location_path():
    """common_location_path renders projects/{project}/locations/{location}."""
    actual = InstancesClient.common_location_path("winkle", "nautilus")
    assert actual == "projects/winkle/locations/nautilus"
def test_parse_common_location_path():
    """parse_common_location_path must invert common_location_path."""
    expected = {"project": "scallop", "location": "abalone"}
    path = InstancesClient.common_location_path(**expected)
    assert InstancesClient.parse_common_location_path(path) == expected
def test_client_withDEFAULT_CLIENT_INFO():
    """A custom ClientInfo must be passed through to _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()
    # Via the client constructor.
    with mock.patch.object(
        transports.InstancesTransport, "_prep_wrapped_messages"
    ) as prep:
        InstancesClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    # And via direct transport construction.
    with mock.patch.object(
        transports.InstancesTransport, "_prep_wrapped_messages"
    ) as prep:
        InstancesClient.get_transport_class()(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
def test_transport_close():
    """Closing the client must close the transport's underlying session."""
    # Map transport name -> attribute holding the closeable session.
    for transport_name, close_attr in {"rest": "_session"}.items():
        client = InstancesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        session_type = type(getattr(client.transport, close_attr))
        with mock.patch.object(session_type, "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager closes its transport on exit."""
    for transport_name in ("rest",):
        client = InstancesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
| 39.221071
| 123
| 0.682746
| 33,185
| 298,943
| 5.874763
| 0.01787
| 0.051489
| 0.050289
| 0.026468
| 0.946875
| 0.928111
| 0.911025
| 0.891318
| 0.874093
| 0.867394
| 0
| 0.008486
| 0.232877
| 298,943
| 7,621
| 124
| 39.226217
| 0.841632
| 0.13732
| 0
| 0.759698
| 0
| 0.006319
| 0.149732
| 0.039236
| 0
| 0
| 0
| 0.000131
| 0.160435
| 1
| 0.044409
| false
| 0.000176
| 0.004213
| 0.000351
| 0.048973
| 0.002809
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e40dfae97c63f003f422eb535e5133642ea1acf3
| 143
|
py
|
Python
|
ipython/startup/import_faker.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
ipython/startup/import_faker.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
ipython/startup/import_faker.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
from contextlib import suppress
with suppress(ModuleNotFoundError):
import faker # noqa: F401
from faker import Faker # noqa: F401
| 20.428571
| 41
| 0.741259
| 17
| 143
| 6.235294
| 0.529412
| 0.207547
| 0.283019
| 0.358491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053097
| 0.20979
| 143
| 6
| 42
| 23.833333
| 0.884956
| 0.146853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e4a8d550ce448d504d79ef469265e54632730762
| 45,422
|
py
|
Python
|
sancus/lib/cogs/mod/views.py
|
Solar-Productions/sancus
|
eb3c5c702bc5574c62b488c0e3bb06a36159e651
|
[
"Apache-2.0"
] | 1
|
2021-09-03T22:52:27.000Z
|
2021-09-03T22:52:27.000Z
|
sancus/lib/cogs/mod/views.py
|
LunarDevelop/sancus
|
eb3c5c702bc5574c62b488c0e3bb06a36159e651
|
[
"Apache-2.0"
] | 1
|
2021-10-10T22:11:51.000Z
|
2021-10-10T22:11:51.000Z
|
sancus/lib/cogs/mod/views.py
|
Solar-Productions/sancus
|
eb3c5c702bc5574c62b488c0e3bb06a36159e651
|
[
"Apache-2.0"
] | 1
|
2021-11-11T16:04:02.000Z
|
2021-11-11T16:04:02.000Z
|
from configparser import ConfigParser
import io
import json
import discord
from discord import reaction
from discord.enums import ButtonStyle
from discord.errors import NotFound
from discord.message import Attachment, Message
from functions.objects import Embeds
from discord.ui import button, Button
from discord.interactions import Interaction
from discord.ui.view import View
import requests
async def welcomeEmbed(self, button: Button, interaction: Interaction):
    """Render and apply the "Welcome Message Menu" embed for this guild.

    Reads the guild's welcome settings from ``self.bot.client.guilds_`` and
    edits ``interaction.message`` in place: a banner-style menu when
    "welcomeType" is truthy, otherwise a text-based menu.
    """
    # A truthy "welcomeType" selects the banner-style welcome message.
    if self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeType"]:
        cur_toggle = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeMessage"]
        cur_type = "Banner Message"
        # Resolve the configured channel id to a mention, if one is set.
        # NOTE(review): fetch_channel can raise NotFound for stale ids — not
        # handled here; confirm upstream guarantees a valid id.
        if self.bot.client.guilds_[str(interaction.guild_id)]["welcomeChannel"] != None:
            cur_channel = await self.bot.client.fetch_channel(self.bot.client.guilds_[
                str(interaction.guild_id)]["welcomeChannel"])
            cur_channel = cur_channel.mention
        else:
            cur_channel = None
        cur_bg = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeBack"]
        cur_banner = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeBanner"]
        cur_icon = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeIcon"]
        cur_colour_txt = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeTxtColor"]
        cur_colour_user = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeUserColor"]
        cur_colour_members = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeMembersColor"]
        embed = Embeds(
            title="Welcome Message Menu",
            description=f"""Change the settings of your welcome message.
            **On or Off** : {cur_toggle}
            **Welcome Type** : {cur_type}
            **Channel** : {cur_channel}
            **Background** : {cur_bg}
            **Banner** : {cur_banner}
            **Icon** : {cur_icon}
            **Text Color** : {cur_colour_txt}
            **User Text Color** : {cur_colour_user}
            **Member Count Text Color** : {cur_colour_members}
            """
        )
        # Swap the message over to the banner-editor view.
        await interaction.message.edit(embed=embed, view=self.bot.welcomeBanner(self.bot))
    else:
        cur_type = "Text Based Message"
        cur_toggle = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeMessage"]
        cur_text = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeText"]
        # Resolve the configured channel id to a mention, if one is set.
        if self.bot.client.guilds_[str(interaction.guild_id)]["welcomeChannel"] != None:
            cur_channel = await self.bot.client.fetch_channel(self.bot.client.guilds_[
                str(interaction.guild_id)]["welcomeChannel"])
            cur_channel = cur_channel.mention
        else:
            cur_channel = None
        embed = Embeds(
            title="Welcome Message Menu",
            description=f"""Change the settings of your welcome message.
            **On or Off** : {cur_toggle}
            **Welcome Type** : {cur_type}
            **Channel** : {cur_channel}
            **Text** : {cur_text}
            """
        )
        # Swap the message over to the text-editor view.
        await interaction.message.edit(embed=embed, view=self.bot.welcomeText(self.bot))
async def logEmbed(self, button: Button, interaction: Interaction):
    """Build the "Log Channel Menu" embed from this guild's configured log channels.

    Fixes: the original used bare ``except:`` clauses, and rendered
    ``{log.mention}`` unconditionally — when "logChannel" is unset or stale,
    ``log`` falls back to ``None`` and the f-string raised AttributeError.
    The mention is now guarded, and only real errors are caught.
    """
    guild_cfg = self.bot.client.guilds_[str(interaction.guild_id)]

    async def _channel_or(key, fallback):
        # Config values may be None or stale channel ids: int(None) raises
        # TypeError and fetch_channel can raise NotFound — both simply mean
        # "no usable channel", so return the fallback instead of crashing.
        try:
            return await self.bot.client.fetch_channel(int(guild_cfg[key]))
        except Exception:
            return fallback

    log = await _channel_or("logChannel", None)
    auto = await _channel_or("autoLogChannel", "Disabled")
    case = await _channel_or("caseChannel", "Disabled")
    ModCmdLog = await _channel_or("modCmdChannel", "Disabled")
    # NOTE(review): auto/case/ModCmdLog are resolved but not rendered below,
    # matching the original output — those menu lines are intentionally blank.
    return Embeds(
        title="Log Channel Menu",
        description=f"""Change the settings of your log channel.
        ```The log channel is the default channel,
        If you change any of the other log methods then they will use that channel.
        You can always turn off the separate log channel by changing it to 0```
        **Log Channel:** {log.mention if log else None}
        **Case Log Channel:**
        **Mod Command Log Channel:**
        **Auto Mod Log Channel:**
        **Nickname Log Channel:**
        """
    )
class main():
class menu(View):
def __init__(self, bot):
super().__init__()
self.bot = bot
@button(
label="Prefix",
style=ButtonStyle.blurple)
async def prefix(self, button: Button, interaction: Interaction):
"""Change your server's prefix to use the bot.
Prefix cannot be more then 5 charcters in length
"""
cur_prefix = self.bot.client.guilds_[
str(interaction.guild_id)]["prefix"]
menu = self.bot.Prefix(self.bot)
embed = Embeds(
title="Prefix Menu",
description=f"If you would like to change the prefix please select **Change** below and type the new prefix in this channel. Otherwise click main menu.\n\n`Current Prefix:` **{cur_prefix}**"
)
await interaction.response.edit_message(embed=embed, view=menu)
@button(
label="Filter",
style=ButtonStyle.blurple
)
async def filter(self, button: Button, interaction: Interaction):
cur_filter = self.bot.client.guilds_[
str(interaction.guild_id)]["filter"]
cur_type = self.bot.client.guilds_[
str(interaction.guild_id)]["filterDelete"]
embed = Embeds(
title="Filter Menu",
description=f"""Change the settings of your filter and custom filter.
Filter: {cur_filter}
Delete Messages: {cur_type}
"""
)
await interaction.response.edit_message(embed=embed, view=self.bot.Filter(self.bot))
@button(
label="Log Channel",
style=ButtonStyle.blurple
)
async def log(self, button: Button, interaction: Interaction):
if self.bot.client.guilds_[
str(interaction.guild_id)]["logChannel"] != None:
try:
cur_channel = await self.bot.client.fetch_channel(int(self.bot.client.guilds_[
str(interaction.guild_id)]["logChannel"]))
except:cur_channel = None
else:
cur_channel = None
embed = await logEmbed(self, button, interaction)
await interaction.response.edit_message(embed=embed, view=self.bot.log(self.bot))
@button(
label="Action Channel",
style=ButtonStyle.blurple
)
async def action(self, button: Button, interaction: Interaction):
if self.bot.client.guilds_[
str(interaction.guild_id)]["actionChannel"] != None:
cur_channel = await self.bot.client.fetch_channel(int(self.bot.client.guilds_[
str(interaction.guild_id)]["actionChannel"]))
else:
cur_channel = None
embed = Embeds(
title="Action Log Channel Menu",
description=f"""Change the settings of your action log channel.
Action Log Channel: {cur_channel}
"""
)
await interaction.response.edit_message(embed=embed, view=self.bot.action(self.bot))
@button(
label="Manage Welcoming",
style=ButtonStyle.blurple
)
async def welcome(self, button: Button, interaction: Interaction):
await welcomeEmbed(self, button, interaction)
@button(
label="Close!",
style=ButtonStyle.red
)
async def close(self, button: Button, interaction: Interaction):
if self.bot.client.guilds_[
str(interaction.guild_id)]["logChannel"] != None:
log = await self.bot.client.fetch_channel(int(self.bot.client.guilds_[
str(interaction.guild_id)]["logChannel"]))
else:
log = None
if self.bot.client.guilds_[
str(interaction.guild_id)]["actionChannel"] != None:
action = await self.bot.client.fetch_channel(int(self.bot.client.guilds_[
str(interaction.guild_id)]["actionChannel"]))
else:
action = None
embed = Embeds(
title=f"{interaction.guild.name}'s Guild Settings",
description=f"""Prefix: **{self.bot.client.guilds_[str(interaction.guild.id)]["prefix"]}**
Filter On: **{self.bot.client.guilds_[str(interaction.guild.id)]["filter"]}**
Filter Delete Messages: **{self.bot.client.guilds_[str(interaction.guild.id)]["filterDelete"]}**
Custom Filter: **{self.bot.client.guilds_[str(interaction.guild.id)]["filterWords"]}**
Log Channel: **{log}**
Action Channel: **{action}**
""",
colour=0x000e8a302
)
await interaction.response.edit_message(embed=embed, view=None)
class Prefix(View):
    """Menu view for showing and changing the guild's command prefix."""

    def __init__(self, bot):
        super().__init__()
        self.bot = bot

    @button(
        label="Change Prefix",
        custom_id="change",
        style=ButtonStyle.green,
        row=0)
    async def recieve(self, button: Button, interaction: Interaction):
        """Ask for a new prefix (1-5 chars), persist it, then redraw the menu.

        Bug fix: the original called interaction.response.edit_message()
        inside the retry loop — an interaction may only be responded to
        once, so the second pass raised — and it rebuilt the embed every
        pass, which hid the validation error. We defer once and edit the
        underlying message instead, matching the log/action views.
        """
        await interaction.response.defer()
        message: Message = interaction.message
        embed = Embeds(
            title="Change Prefix",
            description="Type your new prefix below"
        )
        while True:
            cur_prefix = self.bot.client.guilds_[str(interaction.guild.id)]["prefix"]
            await message.edit(embed=embed, view=None)
            valueObj: Message = await self.bot.client.wait_for(
                "message", timeout=120,
                check=lambda i: i.author.id == interaction.user.id)
            value = valueObj.content
            if len(value) < 1 or len(value) > 5:
                # Surface the validation error on the next redraw.
                embed.description = "Prefix needs to be between 1 and 5 characters long"
            elif value != cur_prefix:
                self.bot.client.config.put_config_guild(
                    interaction.guild.id, {"prefix": value})
                self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()
                await valueObj.delete()
                break
        cur_prefix = self.bot.client.guilds_[str(interaction.guild_id)]["prefix"]
        menu = self.bot.Prefix(self.bot)
        embed = Embeds(
            title="Prefix Menu",
            description=f"If you would like to change the prefix please select **Change** below and type the new prefix in this channel. Otherwise click main menu.\n\n`Current Prefix:` **{cur_prefix}**"
        )
        await message.edit(embed=embed, view=menu)

    @button(
        label="Main Menu",
        custom_id="menu",
        style=ButtonStyle.blurple)
    async def receive(self, button: Button, interaction: Interaction):
        """Return to the top-level settings menu."""
        embed = Embeds(
            title=f"{interaction.guild.name}'s Settings",
            # NOTE(review): colour exceeds 0xFFFFFF — confirm intended value.
            colour=0x000e8a302)
        fields = [
            ("Changing Command Prefix",  # typo fix: was "Comand"
             f"Change how users on your server use commands on {self.bot.client.user.name}"),
            ("Filter setting",
             "Change how the filter reacts, if its on and the custom filter"),
            ("LogChannel",
             "Change the log channel that your server uses."),
            ("Action Channel",
             f"Change the action channel for {interaction.guild.name}."),
            ("Welcome User Message",
             "Open the welcome message editor"),
        ]
        for name, value in fields:
            embed.add_field(name=name, value=value, inline=False)
        await interaction.response.edit_message(embed=embed, view=self.bot.menu(self.bot))
class Filter(View):
    """Menu view for the guild's word-filter settings."""

    def __init__(self, bot):
        super().__init__()
        self.bot = bot

    def _toggle_setting(self, guild_id, key):
        """Flip a boolean guild setting and refresh the cached config."""
        current = self.bot.client.guilds_[str(guild_id)][key]
        self.bot.client.config.put_config_guild(guild_id, {key: not current})
        self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()

    async def _show_filter_menu(self, interaction):
        """Redraw the filter menu with the current settings."""
        conf = self.bot.client.guilds_[str(interaction.guild_id)]
        embed = Embeds(
            title="Filter Menu",
            description=f"""Change the settings of your filter and custom filter.
Filter: {conf['filter']}
Delete Messages: {conf['filterDelete']}
"""
        )
        await interaction.response.edit_message(embed=embed, view=self.bot.Filter(self.bot))

    @button(
        label="Toggle Filter",
        style=ButtonStyle.blurple
    )
    async def toggle(self, button: Button, interaction: Interaction):
        """Turn the word filter on or off."""
        self._toggle_setting(interaction.guild.id, "filter")
        await self._show_filter_menu(interaction)

    @button(
        label="Toggle Filter Delete",
        style=ButtonStyle.red
    )
    async def delete(self, button: Button, interaction: Interaction):
        """Toggle whether filtered messages are deleted."""
        self._toggle_setting(interaction.guild.id, "filterDelete")
        await self._show_filter_menu(interaction)

    @button(
        label="Custom Filter",
        style=ButtonStyle.green
    )
    async def custom(self, button: Button, interaction: Interaction):
        """Open the custom filter word-list menu."""
        cur_words = self.bot.client.guilds_[str(interaction.guild_id)]["filterWords"]
        embed = Embeds(
            title="Filter Menu",
            description=f"""Change the settings of your custom filter.
Custom Filter Words: {cur_words}
"""
        )
        await interaction.response.edit_message(embed=embed, view=self.bot.custom(self.bot))

    @button(
        label="Main Menu",
        row=1,
        style=ButtonStyle.blurple)
    async def receive(self, button: Button, interaction: Interaction):
        """Return to the top-level settings menu."""
        embed = Embeds(
            title=f"{interaction.guild.name}'s Settings",
            # NOTE(review): colour exceeds 0xFFFFFF — confirm intended value.
            colour=0x000e8a302)
        fields = [
            ("Changing Command Prefix",  # typo fix: was "Comand"
             f"Change how users on your server use commands on {self.bot.client.user.name}"),
            ("Filter setting",
             "Change how the filter reacts, if its on and the custom filter"),
            ("LogChannel",
             "Change the log channel that your server uses."),
            ("Action Channel",
             f"Change the action channel for {interaction.guild.name}."),
            ("Welcome User Message",
             "Open the welcome message editor"),
        ]
        for name, value in fields:
            embed.add_field(name=name, value=value, inline=False)
        await interaction.response.edit_message(embed=embed, view=self.bot.menu(self.bot))
class custom(View):
    """Menu view for editing the custom filter word list."""

    def __init__(self, bot):
        super().__init__()
        self.bot = bot

    def _save_words(self, guild_id, words):
        """Persist the word list and refresh the cached guild config."""
        self.bot.client.config.put_config_guild(guild_id, {"filterWords": words})
        self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()

    async def _redraw(self, interaction, message):
        """Re-render the custom filter menu on *message*."""
        cur_words = self.bot.client.guilds_[str(interaction.guild_id)]["filterWords"]
        embed = Embeds(
            title="Filter Menu",
            description=f"""Change the settings of your custom filter.
Custom Filter Words: {cur_words}
"""
        )
        await message.edit(embed=embed, view=self.bot.custom(self.bot))

    @button(
        label="Add a Word",
        style=ButtonStyle.blurple
    )
    async def add(self, button: Button, interaction: Interaction):
        """Prompt for a word and append it (lower-cased) to the filter list."""
        message = interaction.message
        await interaction.message.edit(view=None)
        cur_list = self.bot.client.guilds_[str(interaction.guild_id)]["filterWords"]
        if cur_list is None:
            cur_list = []
        await interaction.response.send_message("What word do you want to add to the filter?")
        msg = await self.bot.client.wait_for("message", check=lambda u: u.author.id == interaction.user.id)
        cur_list.append(msg.content.lower())
        self._save_words(interaction.guild.id, cur_list)
        await interaction.delete_original_message()
        try:
            await msg.delete()
        except NotFound:
            pass
        await self._redraw(interaction, message)

    @button(
        label="Remove a Word",
        style=ButtonStyle.red
    )
    async def remove(self, button: Button, interaction: Interaction):
        """Prompt for a word and remove it from the filter list."""
        message = interaction.message
        await interaction.message.edit(view=None)
        cur_list: list = self.bot.client.guilds_[str(interaction.guild_id)]["filterWords"]
        if cur_list is None:
            cur_list = []
        await interaction.response.send_message("What word do you want to remove from the filter?")
        msg = await self.bot.client.wait_for("message", check=lambda u: u.author.id == interaction.user.id)
        word = msg.content.lower()
        # Bug fix: list.remove() raised an unhandled ValueError when the
        # word was not in the list; silently ignore unknown words instead.
        if word in cur_list:
            cur_list.remove(word)
        self._save_words(interaction.guild.id, cur_list)
        await interaction.delete_original_message()
        try:
            await msg.delete()
        except NotFound:
            pass
        await self._redraw(interaction, message)

    @button(
        label="Remove All",
        style=ButtonStyle.red
    )
    async def all(self, button: Button, interaction: Interaction):
        """Clear the entire custom word list."""
        self._save_words(interaction.guild.id, None)
        cur_words = self.bot.client.guilds_[str(interaction.guild_id)]["filterWords"]
        embed = Embeds(
            title="Filter Menu",
            description=f"""Change the settings of your custom filter.
Custom Filter Words: {cur_words}
"""
        )
        await interaction.response.edit_message(embed=embed, view=self.bot.custom(self.bot))

    @button(
        label="Return to Filter Menu",
        style=ButtonStyle.grey
    )
    async def mainmenu(self, button: Button, interaction: Interaction):
        """Go back to the filter settings menu."""
        conf = self.bot.client.guilds_[str(interaction.guild_id)]
        embed = Embeds(
            title="Filter Menu",
            description=f"""Change the settings of your filter and custom filter.
Filter: {conf['filter']}
Delete Messages: {conf['filterDelete']}
"""
        )
        await interaction.response.edit_message(embed=embed, view=self.bot.Filter(self.bot))
class log(View):
    """Menu view for configuring the guild's log channel."""

    def __init__(self, bot):
        super().__init__()
        self.bot = bot

    @button(
        label="Change Log Channel",
        custom_id="change",
        style=ButtonStyle.green,
        row=0)
    async def recieve(self, button: Button, interaction: Interaction):
        """Prompt for a channel mention or id, store it, then redraw the log menu."""
        await interaction.response.defer()
        embed = Embeds(
            title="Change Log Channel",
            description="Type your new log channel below, either #ChannelName or channel id"
        )
        message: Message = interaction.message
        while True:
            await message.edit(embed=embed, view=None)
            valueObj: Message = await self.bot.client.wait_for(
                "message", timeout=120,
                check=lambda i: i.author.id == interaction.user.id)
            await valueObj.delete()
            if valueObj.channel_mentions:
                channel_id = valueObj.channel_mentions[0].id
            else:
                try:
                    # Bug fix: fetch_channel is a coroutine and was not awaited,
                    # so every raw-id entry fell into the error branch.
                    channel = await self.bot.client.fetch_channel(int(valueObj.content))
                    channel_id = channel.id
                except Exception:
                    embed.add_field(
                        name="Error", value="Make sure you are entering either the id of the channel or mention the channel by typing # then the channel name")
                    await message.edit(embed=embed)
                    continue
            self.bot.client.config.put_config_guild(
                interaction.guild.id, {"logChannel": channel_id})
            self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()
            break
        embed = await logEmbed(self, button, interaction)
        await message.edit(embed=embed, view=self.bot.log(self.bot))

    @button(
        label="Main Menu",
        custom_id="menu",
        style=ButtonStyle.blurple)
    async def receive(self, button: Button, interaction: Interaction):
        """Return to the top-level settings menu."""
        embed = Embeds(
            title=f"{interaction.guild.name}'s Settings",
            # NOTE(review): colour exceeds 0xFFFFFF — confirm intended value.
            colour=0x000e8a302)
        fields = [
            ("Changing Command Prefix",  # typo fix: was "Comand"
             f"Change how users on your server use commands on {self.bot.client.user.name}"),
            ("Filter setting",
             "Change how the filter reacts, if its on and the custom filter"),
            ("LogChannel",
             "Change the log channel that your server uses."),
            ("Action Channel",
             f"Change the action channel for {interaction.guild.name}."),
            ("Welcome User Message",
             "Open the welcome message editor"),
        ]
        for name, value in fields:
            embed.add_field(name=name, value=value, inline=False)
        await interaction.response.edit_message(embed=embed, view=self.bot.menu(self.bot))
class action(View):
    """Menu view for configuring the guild's action log channel."""

    def __init__(self, bot):
        super().__init__()
        self.bot = bot

    @button(
        label="Change Action Log Channel",
        custom_id="change",
        style=ButtonStyle.green,
        row=0)
    async def recieve(self, button: Button, interaction: Interaction):
        """Prompt for a channel mention or id, store it, then redraw the action menu."""
        await interaction.response.defer()
        embed = Embeds(
            title="Change Action Log Channel",
            description="Type your new action log channel below, either #ChannelName or channel id"
        )
        message: Message = interaction.message
        while True:
            await message.edit(embed=embed, view=None)
            valueObj: Message = await self.bot.client.wait_for(
                "message", timeout=120,
                check=lambda i: i.author.id == interaction.user.id)
            await valueObj.delete()
            if valueObj.channel_mentions:
                channel_id = valueObj.channel_mentions[0].id
            else:
                try:
                    # Bug fix: fetch_channel is a coroutine and was not awaited,
                    # so every raw-id entry fell into the error branch.
                    channel = await self.bot.client.fetch_channel(int(valueObj.content))
                    channel_id = channel.id
                except Exception:
                    embed.add_field(
                        name="Error", value="Make sure you are entering either the id of the channel or mention the channel by typing # then the channel name")
                    await message.edit(embed=embed)
                    continue
            self.bot.client.config.put_config_guild(
                interaction.guild.id, {"actionChannel": channel_id})
            self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()
            break
        conf = self.bot.client.guilds_[str(interaction.guild_id)]
        if conf["actionChannel"] is not None:
            cur_channel = await self.bot.client.fetch_channel(int(conf["actionChannel"]))
        else:
            cur_channel = None
        embed = Embeds(
            title="Action Log Channel Menu",
            description=f"""Change the settings of your action log channel.
Action Log Channel: {cur_channel}
"""
        )
        # Bug fix: previously re-attached the *log* view here; return to the
        # action-channel menu instead.
        await message.edit(embed=embed, view=self.bot.action(self.bot))

    @button(
        label="Main Menu",
        custom_id="menu",
        style=ButtonStyle.blurple)
    async def receive(self, button: Button, interaction: Interaction):
        """Return to the top-level settings menu."""
        embed = Embeds(
            title=f"{interaction.guild.name}'s Settings",
            # NOTE(review): colour exceeds 0xFFFFFF — confirm intended value.
            colour=0x000e8a302)
        fields = [
            ("Changing Command Prefix",  # typo fix: was "Comand"
             f"Change how users on your server use commands on {self.bot.client.user.name}"),
            ("Filter setting",
             "Change how the filter reacts, if its on and the custom filter"),
            ("LogChannel",
             "Change the log channel that your server uses."),
            ("Action Channel",
             f"Change the action channel for {interaction.guild.name}."),
            ("Welcome User Message",
             "Open the welcome message editor"),
        ]
        for name, value in fields:
            embed.add_field(name=name, value=value, inline=False)
        await interaction.response.edit_message(embed=embed, view=self.bot.menu(self.bot))
class welcomeText(View):
    """Welcome-message editor shown when the guild uses text-based welcomes.

    Disabled red buttons act as section headers; the banner-specific
    controls are disabled in this view.
    """

    def __init__(self, bot):
        super().__init__()
        self.bot = bot

    def _set_setting(self, guild_id, key, value):
        """Write one guild setting and refresh the cached config."""
        self.bot.client.config.put_config_guild(guild_id, {key: value})
        self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()

    @button(
        label="General:",
        style=ButtonStyle.red,
        disabled=True
    )
    async def generalTitle(self, button: Button, interaction: Interaction):
        """Section header only; the button is disabled."""
        pass

    @button(
        label="Turn On/Off",
        style=ButtonStyle.gray
    )
    async def turn(self, button: Button, interaction: Interaction):
        """Toggle whether welcome messages are sent at all."""
        await interaction.response.defer()
        current = self.bot.client.guilds_[str(interaction.guild_id)]["welcomeMessage"]
        self._set_setting(interaction.guild.id, "welcomeMessage", not current)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Channel?",
        style=ButtonStyle.blurple
    )
    async def channel(self, button: Button, interaction: Interaction):
        """Ask for a #channel mention and store it as the welcome channel."""
        await interaction.response.defer()
        embed = Embeds(
            title="Change the welcome channel",
            description="Type the channel below that you want the welcome messages in by doing #ChannelName"
        )
        await interaction.message.edit(embed=embed, view=None)
        msg: Message = await self.bot.client.wait_for("message", check=lambda i: i.author.id == interaction.user.id)
        await msg.delete()
        # NOTE(review): raises IndexError when the reply contains no channel
        # mention — confirm whether that should be handled.
        channel = msg.channel_mentions[0].id
        self._set_setting(interaction.guild.id, "welcomeChannel", channel)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Toggle Type",
        style=ButtonStyle.blurple
    )
    async def toggle(self, button: Button, interaction: Interaction):
        """Switch between text-based and banner-based welcomes."""
        await interaction.response.defer()
        current = self.bot.client.guilds_[str(interaction.guild_id)]["welcomeType"]
        self._set_setting(interaction.guild.id, "welcomeType", not current)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Test message",
        style=ButtonStyle.grey
    )
    async def test(self, button: Button, interaction: Interaction):
        """Send a short-lived preview of the configured welcome text."""
        cur_text: str = self.bot.client.guilds_[
            str(interaction.guild_id)]["welcomeText"]
        cur_text = cur_text.format(user=interaction.user.name,
                                   server=interaction.guild.name)
        await interaction.channel.send(cur_text, delete_after=10)

    @button(
        label="Text-Based:",
        style=ButtonStyle.red,
        row=1,
        disabled=True
    )
    async def textTitle(self, button: Button, interaction: Interaction):
        """Section header only; the button is disabled."""
        pass

    @button(
        label="Change Text",
        style=ButtonStyle.green,
        row=1
    )
    async def text(self, button: Button, interaction: Interaction):
        """Prompt for new welcome text and save it."""
        await interaction.response.defer()
        embed = Embeds(
            title="Change the welcome text",
            # typo fix: was "Arugments"
            description="Type below what you would like your welcome message to say.\n\nSpecial Arguments:\n Server Name: {server}\nUsername: {user}"
        )
        await interaction.message.edit(embed=embed, view=None)
        msg = await self.bot.client.wait_for("message", check=lambda i: i.author.id == interaction.user.id)
        await msg.delete()
        self._set_setting(interaction.guild.id, "welcomeText", msg.content)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Banner-Based:",
        style=ButtonStyle.red,
        row=2,
        disabled=True
    )
    async def bannerTitle(self, button: Button, interaction: Interaction):
        """Section header only; the button is disabled."""
        pass

    @button(
        label="Background Color",
        style=ButtonStyle.green,
        disabled=True,
        row=2
    )
    async def background(self, button: Button, interaction: Interaction):
        """Disabled here; available in the banner-based editor."""
        pass

    @button(
        label="Banner",
        style=ButtonStyle.green,
        disabled=True,
        row=2
    )
    async def banner(self, button: Button, interaction: Interaction):
        """Disabled here; available in the banner-based editor."""
        pass

    @button(
        label="Icon",
        style=ButtonStyle.green,
        disabled=True,
        row=2
    )
    async def icon(self, button: Button, interaction: Interaction):
        """Disabled here; available in the banner-based editor."""
        pass

    @button(
        label="Color Menu",
        style=ButtonStyle.green,
        disabled=True,
        row=2)
    async def colour(self, button: Button, interaction: Interaction):
        """Disabled placeholder for a future colour menu."""
        pass
class welcomeBanner(View):
    """Welcome-message editor shown when the guild uses banner-based welcomes.

    Text-based controls are disabled in this view; banner previews are
    rendered through the FluxPoint image API.
    """

    def __init__(self, bot):
        # NOTE(review): timeout=0 makes the view expire immediately —
        # confirm this is intentional (other views use the default timeout).
        super().__init__(timeout=0)
        self.bot = bot

    def _set_setting(self, guild_id, key, value):
        """Write one guild setting and refresh the cached config."""
        self.bot.client.config.put_config_guild(guild_id, {key: value})
        self.bot.client.guilds_ = self.bot.client.config.get_config_guilds()

    async def _prompt_and_store(self, interaction, button, embed, key):
        """Show *embed*, wait for the user's reply, store its text under *key*."""
        await interaction.message.edit(embed=embed, view=None)
        msg: Message = await self.bot.client.wait_for(
            "message", check=lambda i: i.author.id == interaction.user.id)
        await msg.delete()
        self._set_setting(interaction.guild.id, key, msg.content)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="General:",
        style=ButtonStyle.red,
        disabled=True
    )
    async def generalTitle(self, button: Button, interaction: Interaction):
        """Section header only; the button is disabled."""
        pass

    @button(
        label="Turn On/Off",
        style=ButtonStyle.gray
    )
    async def turn(self, button: Button, interaction: Interaction):
        """Toggle whether welcome messages are sent at all."""
        await interaction.response.defer()
        current = self.bot.client.guilds_[str(interaction.guild_id)]["welcomeMessage"]
        self._set_setting(interaction.guild.id, "welcomeMessage", not current)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Channel?",
        style=ButtonStyle.blurple
    )
    async def channel(self, button: Button, interaction: Interaction):
        """Ask for a #channel mention and store it as the welcome channel."""
        await interaction.response.defer()
        embed = Embeds(
            title="Change the welcome channel",
            description="Type the channel below that you want the welcome messages in by doing #ChannelName"
        )
        await interaction.message.edit(embed=embed, view=None)
        msg: Message = await self.bot.client.wait_for("message", check=lambda i: i.author.id == interaction.user.id)
        await msg.delete()
        channel = msg.channel_mentions[0].id
        self._set_setting(interaction.guild.id, "welcomeChannel", channel)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Toggle Type",
        style=ButtonStyle.blurple
    )
    async def toggle(self, button: Button, interaction: Interaction):
        """Switch between text-based and banner-based welcomes."""
        await interaction.response.defer()
        current = self.bot.client.guilds_[str(interaction.guild_id)]["welcomeType"]
        self._set_setting(interaction.guild.id, "welcomeType", not current)
        await welcomeEmbed(self, button, interaction)

    @button(
        label="Test message",
        style=ButtonStyle.grey
    )
    async def test(self, button: Button, interaction: Interaction):
        """Render a banner preview via the FluxPoint API and send it briefly."""
        conf = self.bot.client.guilds_[str(interaction.guild_id)]
        api_ini = "sancus/data/api.ini"
        api_data = ConfigParser()
        with open(api_ini) as f:
            api_data.read_file(f)
        headers = {
            "Authorization": api_data["FluxPoint"]["api_token"]
        }
        data = {
            "username": f"{interaction.user.name}#{interaction.user.discriminator}",
            # NOTE(review): user.avatar may be None for default avatars —
            # confirm whether a fallback URL is needed.
            "avatar": interaction.user.avatar.url,
            "background": f"#{conf['welcomeBack']}",
            "members": "member #1",
            "icon": conf["welcomeIcon"],
            "banner": conf["welcomeBanner"],
            "color_welcome": f"#{conf['welcomeTxtColor']}",
            "color_username": f"#{conf['welcomeUserColor']}",
            "color_members": f"#{conf['welcomeMembersColor']}",
        }
        request = requests.get("https://api.fluxpoint.dev/gen/welcome", headers=headers, json=data)
        if request.ok:
            image = io.BytesIO(request.content)
            file = discord.File(image, filename="image.png")
            embed = Embeds()
            embed.set_image(url="attachment://image.png")
            await interaction.channel.send(file=file, embed=embed, delete_after=10)
        else:
            # Surface the API's error message on failure.
            await interaction.channel.send(json.loads(request.content)["message"])

    @button(
        label="Text-Based:",
        style=ButtonStyle.red,
        row=1,
        disabled=True
    )
    async def textTitle(self, button: Button, interaction: Interaction):
        """Section header only; the button is disabled."""
        pass

    @button(
        label="Change Text",
        style=ButtonStyle.green,
        row=1,
        disabled=True
    )
    async def text(self, button: Button, interaction: Interaction):
        """Disabled here; available in the text-based editor."""
        pass

    @button(
        label="Banner-Based:",
        style=ButtonStyle.red,
        row=2,
        disabled=True
    )
    async def bannerTitle(self, button: Button, interaction: Interaction):
        """Section header only; the button is disabled."""
        pass

    @button(
        label="Background Color",
        style=ButtonStyle.green,
        row=2
    )
    async def background(self, button: Button, interaction: Interaction):
        """Prompt for a hex code and store it as the banner background colour."""
        await interaction.response.defer()
        embed = Embeds(
            title="Change the background colour",
            description="Type the hex code that you want to use for the background of the banner.\n Find the hex codes here: \n**https://htmlcolorcodes.com/**"
        )
        await self._prompt_and_store(interaction, button, embed, "welcomeBack")

    @button(
        label="Banner",
        style=ButtonStyle.green,
        row=2
    )
    async def banner(self, button: Button, interaction: Interaction):
        """Prompt for a premade banner name or image link and store it."""
        premades = [
            "love",
            "mountain",
            "purplewave",
            "rainbow",
            "space",
            "sunset",
            "swamp",
            "waifubot",
            "wave"
        ]
        await interaction.response.defer()
        embed = Embeds(
            title="Change the banner image",
            description=f"Enter either one of the following banner types or enter a custom image link.\n Link **must** end in .jpg or .png\n{premades}"
        )
        await self._prompt_and_store(interaction, button, embed, "welcomeBanner")

    @button(
        label="Icon",
        style=ButtonStyle.green,
        row=2
    )
    async def icon(self, button: Button, interaction: Interaction):
        """Prompt for a premade icon name or image link and store it."""
        premades = [
            "cat",
            "chika",
            "dog",
            "dragon",
            "neko",
            "nyancat",
            "pepe",
            "pikachu",
            "senko",
            "shrek"
        ]
        await interaction.response.defer()
        embed = Embeds(
            title="Change the icon image",
            description=f"Enter either one of the following icon types or enter a custom image link.\n Link **must** end in .jpg or .png\n{premades}"
        )
        await self._prompt_and_store(interaction, button, embed, "welcomeIcon")
| 38.955403
| 206
| 0.546585
| 4,612
| 45,422
| 5.272767
| 0.067216
| 0.06707
| 0.092483
| 0.071881
| 0.87573
| 0.859857
| 0.845382
| 0.838926
| 0.833333
| 0.791595
| 0
| 0.002884
| 0.351217
| 45,422
| 1,165
| 207
| 38.988841
| 0.822322
| 0
| 0
| 0.716842
| 0
| 0.012632
| 0.199539
| 0.015796
| 0
| 0
| 0.00122
| 0
| 0
| 1
| 0.008421
| false
| 0.013684
| 0.013684
| 0
| 0.032632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e4d5a6125f02330eecd2a6048821f70e6e07c980
| 169
|
py
|
Python
|
src/clearskies/contexts/__init__.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | 4
|
2021-04-23T18:13:06.000Z
|
2022-03-26T01:51:01.000Z
|
src/clearskies/contexts/__init__.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | null | null | null |
src/clearskies/contexts/__init__.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | null | null | null |
from .cli import cli
from .test import test
from .wsgi import wsgi
from .aws_lambda_api_gateway import aws_lambda_api_gateway
from .aws_lambda_elb import aws_lambda_elb
| 28.166667
| 58
| 0.852071
| 30
| 169
| 4.466667
| 0.333333
| 0.268657
| 0.19403
| 0.283582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118343
| 169
| 5
| 59
| 33.8
| 0.899329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e4edee5eb5bd9e97d196b7ce8841da0fc559573d
| 5,644
|
py
|
Python
|
tests/test_analysis_grid.py
|
Spect4tor/python-graspfile
|
b1d623c018edb5f27714b083e967d924527b7509
|
[
"MIT"
] | 1
|
2021-11-08T09:46:03.000Z
|
2021-11-08T09:46:03.000Z
|
tests/test_analysis_grid.py
|
Spect4tor/python-graspfile
|
b1d623c018edb5f27714b083e967d924527b7509
|
[
"MIT"
] | 6
|
2020-05-07T18:42:16.000Z
|
2021-12-02T17:22:34.000Z
|
tests/test_analysis_grid.py
|
PaulKGrimes/python-graspfile
|
4ab4b652a615344596699eda92b6a744b1321bbf
|
[
"MIT"
] | 1
|
2021-11-04T11:02:29.000Z
|
2021-11-04T11:02:29.000Z
|
# test_analysis_grid.py
import numpy as np
import pytest
from graspfile import grid
from graspfile.analysis import grid as ga
test_grid_file = "tests/test_data/grasp_files/square_aperture.grd"
"""TICRA Tools 19.1 GRASP Grid file, consisting of three grids at 82, 97 and 112 GHz."""
@pytest.fixture
def empty_grasp_grid():
    """Provide a freshly constructed, unpopulated GraspGrid."""
    return grid.GraspGrid()
@pytest.fixture
def grid_file():
    """Yield an open file object for the sample GRASP grid file.

    A generator fixture guarantees the handle is closed after the test,
    even when a consuming test does not close it itself (closing an
    already-closed file is a no-op).
    """
    f = open(test_grid_file)
    yield f
    f.close()
@pytest.fixture
def filled_grasp_grid(empty_grasp_grid, grid_file):
    """Provide a GraspGrid populated from the sample grid file."""
    empty_grasp_grid.read(grid_file)
    grid_file.close()
    return empty_grasp_grid
@pytest.fixture
def filled_grasp_field(filled_grasp_grid):
    """Provide the first GraspField of the populated grid."""
    return filled_grasp_grid.fields[0]
def test_get_value(filled_grasp_field):
    """Sampling on- and off-grid returns one value per field component."""
    x_vals, y_vals = filled_grasp_field.positions_1d
    i = len(x_vals) // 3
    j = 2 * len(y_vals) // 3
    # Sample exactly on grid nodes.
    value = filled_grasp_field.get_value(x_vals[i], y_vals[j])
    assert len(value) == filled_grasp_field.field_components
    # Sample midway between grid nodes (interpolated).
    xv = (x_vals[i] + x_vals[i + 1]) / 2
    yv = (y_vals[j] + y_vals[j - 1]) / 2
    value = filled_grasp_field.get_value(xv, yv)
    assert len(value) == filled_grasp_field.field_components
def test_finding_peak(filled_grasp_field):
    """The peak location lies within the grid bounds."""
    x_peak, y_peak = ga.find_peak(filled_grasp_field)
    assert filled_grasp_field.grid_min_x <= x_peak <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_peak <= filled_grasp_field.grid_max_y
def test_finding_peak_comp1(filled_grasp_field):
    """The peak of field component 1 lies within the grid bounds."""
    x_peak, y_peak = ga.find_peak(filled_grasp_field, comp=1)
    assert filled_grasp_field.grid_min_x <= x_peak <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_peak <= filled_grasp_field.grid_max_y
def test_finding_peak_min_radius(filled_grasp_field):
    """Peak search excluding an inner radius stays within the grid bounds."""
    radii = filled_grasp_field.radius_grid()
    rows, cols = radii.shape
    min_radius = radii[rows // 3, cols // 3]
    x_peak, y_peak = ga.find_peak(filled_grasp_field, min_radius=min_radius)
    assert filled_grasp_field.grid_min_x <= x_peak <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_peak <= filled_grasp_field.grid_max_y
def test_finding_peak_max_radius(filled_grasp_field):
    """Peak search limited to an outer radius stays within the grid bounds."""
    radii = filled_grasp_field.radius_grid()
    rows, cols = radii.shape
    max_radius = radii[rows // 5, cols // 5]
    x_peak, y_peak = ga.find_peak(filled_grasp_field, max_radius=max_radius)
    assert filled_grasp_field.grid_min_x <= x_peak <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_peak <= filled_grasp_field.grid_max_y
def test_finding_center(filled_grasp_field):
    """The centroid location lies within the grid bounds."""
    x_cent, y_cent = ga.find_center(filled_grasp_field)
    assert filled_grasp_field.grid_min_x <= x_cent <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_cent <= filled_grasp_field.grid_max_y
def test_finding_center_comp1(filled_grasp_field):
    """The centroid of field component 1 lies within the grid bounds."""
    x_cent, y_cent = ga.find_center(filled_grasp_field, comp=1)
    assert filled_grasp_field.grid_min_x <= x_cent <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_cent <= filled_grasp_field.grid_max_y
def test_finding_center_trunc(filled_grasp_field):
    """The centroid with a truncation level lies within the grid bounds."""
    x_peak, y_peak = ga.find_peak(filled_grasp_field)
    peak = filled_grasp_field.get_value(x_peak, y_peak)
    # Truncate at one fifth of the peak amplitude of component 0.
    trunc = np.abs(peak[0]) / 5.0
    x_cent, y_cent = ga.find_center(filled_grasp_field, trunc_level=trunc)
    assert filled_grasp_field.grid_min_x <= x_cent <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_cent <= filled_grasp_field.grid_max_y
def test_finding_center_min_radius(filled_grasp_field):
    """Centroid search excluding an inner radius stays within the grid bounds."""
    radii = filled_grasp_field.radius_grid()
    rows, cols = radii.shape
    min_radius = radii[rows // 3, cols // 3]
    x_cent, y_cent = ga.find_center(filled_grasp_field, min_radius=min_radius)
    assert filled_grasp_field.grid_min_x <= x_cent <= filled_grasp_field.grid_max_x
    assert filled_grasp_field.grid_min_y <= y_cent <= filled_grasp_field.grid_max_y
def test_finding_center_max_radius(filled_grasp_field):
    """Verify find_center with a maximum radius keeps the center in bounds."""
    field = filled_grasp_field
    radius_grid = field.radius_grid()
    rows, cols = radius_grid.shape
    # Pick the radius a fifth of the way into the grid as the upper cutoff.
    upper_cutoff = radius_grid[rows // 5, cols // 5]
    center_x, center_y = ga.find_center(field, max_radius=upper_cutoff)
    assert field.grid_min_x <= center_x <= field.grid_max_x
    assert field.grid_min_y <= center_y <= field.grid_max_y
| 32.624277
| 88
| 0.745571
| 961
| 5,644
| 3.987513
| 0.093652
| 0.19833
| 0.279749
| 0.187891
| 0.79358
| 0.764875
| 0.764875
| 0.755219
| 0.74739
| 0.724426
| 0
| 0.010144
| 0.161588
| 5,644
| 172
| 89
| 32.813953
| 0.799662
| 0.121368
| 0
| 0.591837
| 0
| 0
| 0.009796
| 0.009796
| 0
| 0
| 0
| 0
| 0.387755
| 1
| 0.142857
| false
| 0
| 0.040816
| 0
| 0.22449
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f74c602c7d0aeb2dbaa4c98e732dd354ceca255
| 70
|
py
|
Python
|
test_child.py
|
summerSpyDad/test_repo
|
ca7982182833bbce5c34f0460db67297fa8d2d62
|
[
"CC0-1.0"
] | null | null | null |
test_child.py
|
summerSpyDad/test_repo
|
ca7982182833bbce5c34f0460db67297fa8d2d62
|
[
"CC0-1.0"
] | null | null | null |
test_child.py
|
summerSpyDad/test_repo
|
ca7982182833bbce5c34f0460db67297fa8d2d62
|
[
"CC0-1.0"
] | null | null | null |
#Hello
# Print the two greetings in order.
for greeting in ("hello from the test child", "hello from Theia"):
    print(greeting)
| 11.666667
| 34
| 0.714286
| 11
| 70
| 4.545455
| 0.636364
| 0.4
| 0.56
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 5
| 35
| 14
| 0.847458
| 0.071429
| 0
| 0
| 0
| 0
| 0.650794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
8442cc4d8d2e47a2e9c73a12c33ccc1b790c2c7a
| 6,514
|
py
|
Python
|
construct/protocols/application/http.py
|
MostAwesomeDude/construct
|
dad8ecaf5ce75c7a66cd995bfc137d1ad368e290
|
[
"MIT"
] | 89
|
2015-01-01T03:27:45.000Z
|
2022-02-22T11:18:20.000Z
|
construct/protocols/application/http.py
|
jesseward/construct
|
88536c57b5de4c94c7f6cf6b8355ab1a16755ed4
|
[
"MIT"
] | 8
|
2016-08-23T13:06:25.000Z
|
2021-08-03T13:47:52.000Z
|
construct/protocols/application/http.py
|
jesseward/construct
|
88536c57b5de4c94c7f6cf6b8355ab1a16755ed4
|
[
"MIT"
] | 29
|
2016-12-17T04:17:10.000Z
|
2022-03-17T06:15:45.000Z
|
"""
Hyper Text Transfer Protocol (TCP/IP protocol stack)
Construct is not meant for text manipulation, and is probably not the right
tool for the job, but I wanted to demonstrate how this could be done using
the provided `text` module.
"""
from construct import *
from construct.text import *
class HttpParamDictAdapter(Adapter):
    """Adapter that converts between a sequence of param containers and a dict."""
    def _encode(self, obj, context):
        # Build one Container per (name, value) pair of the dict.
        containers = []
        for key, val in obj.iteritems():
            containers.append(Container(name = key, value = val))
        return containers
    def _decode(self, obj, context):
        # Collapse the parsed containers into a name -> value mapping.
        mapping = {}
        for container in obj:
            mapping[container.name] = container.value
        return mapping
# Shared terminal tokens for the HTTP grammar.
lineterm = Literal("\r\n")  # CRLF line terminator
space = Whitespace()

# http parameter: 'name: value\r\n'
http_param = Struct("params",
    StringUpto("name", ":\r\n"),
    Literal(":"),
    space,
    StringUpto("value", "\r"),
    lineterm,
)
# Zero or more header lines, exposed to callers as a plain dict.
http_params = HttpParamDictAdapter(
    OptionalGreedyRange(http_param)
)

# request: command and params
http_request = Struct("request",
    StringUpto("command", " "),
    space,
    StringUpto("url", " "),
    space,
    Literal("HTTP/"),
    StringUpto("version", "\r"),
    lineterm,
    http_params,
    lineterm,
)

# reply: header (answer and params) and data
http_reply = Struct("reply",
    Literal("HTTP/"),
    StringUpto("version", " "),
    space,
    DecNumber("code"),
    space,
    StringUpto("text", "\r"),
    lineterm,
    http_params,
    lineterm,
    # Body length comes from the Content-length header parsed just above.
    HexDumpAdapter(
        Field("data", lambda ctx: int(ctx["params"]["Content-length"]))
    ),
)

# session: request followed reply
http_session = Struct("session",
    http_request,
    http_reply,
)
if __name__ == "__main__":
    # A captured HTTP session (GET request + 200 reply carrying a GIF image),
    # stored as one long hex string.
    # NOTE: str.decode("hex") and the bare `print` statement below are
    # Python 2 only.
    cap1 = (
        "474554202f636e6e2f2e656c656d656e742f696d672f312e352f6365696c696e672f6e"
        "61765f706970656c696e655f646b626c75652e67696620485454502f312e310d0a486f"
        "73743a20692e636e6e2e6e65740d0a557365722d4167656e743a204d6f7a696c6c612f"
        "352e30202857696e646f77733b20553b2057696e646f7773204e5420352e313b20656e"
        "2d55533b2072763a312e382e3129204765636b6f2f3230303631303130204669726566"
        "6f782f322e300d0a4163636570743a20696d6167652f706e672c2a2f2a3b713d302e35"
        "0d0a4163636570742d4c616e67756167653a20656e2d75732c656e3b713d302e350d0a"
        "4163636570742d456e636f64696e673a20677a69702c6465666c6174650d0a41636365"
        "70742d436861727365743a2049534f2d383835392d312c7574662d383b713d302e372c"
        "2a3b713d302e370d0a4b6565702d416c6976653a203330300d0a436f6e6e656374696f"
        "6e3a206b6565702d616c6976650d0a526566657265723a20687474703a2f2f7777772e"
        "636e6e2e636f6d2f0d0a0d0a485454502f312e3120323030204f4b0d0a446174653a20"
        "53756e2c2031302044656320323030362031373a34383a303120474d540d0a53657276"
        "65723a204170616368650d0a436f6e74656e742d747970653a20696d6167652f676966"
        "0d0a457461673a202266313232383761352d63642d3562312d30220d0a4c6173742d6d"
        "6f6469666965643a204d6f6e2c2032372046656220323030362032323a33393a303920"
        "474d540d0a436f6e74656e742d6c656e6774683a20313435370d0a4163636570742d72"
        "616e6765733a2062797465730d0a4b6565702d416c6976653a2074696d656f75743d35"
        "2c206d61783d313032340d0a436f6e6e656374696f6e3a204b6565702d416c6976650d"
        "0a0d0a47494638396148001600f7000037618d436a94ebf0f4cad5e1bccad93a638fd2"
        "dce639628e52769c97adc44c7299426a93dce3eb6182a5dee5ec5d7fa338628d466d95"
        "88a1bb3c65907b97b4d43f3ba7bacdd9e1eaa6b8cce6ebf1dc5a59cc1313718faed8e0"
        "e99fb3c8ced9e350759b6989aa6787a85e80a391a8c0ffffffbbc9d8b1c2d3e0e7eed1"
        "dae5c2cfdcd2dbe57c98b4e7ecf23b648f587ba098aec4859eb9e4e9ef3e67918aa3bc"
        "aebfd17793b1cfd9e4abbdcfbfcddbb3c3d44b71995a7da13f6791a5b8cccbd6e17491"
        "b051759cd535327390afc7d2dfb8c7d7b0c0d24e739a7693b19bb0c64f749ac3cfdd49"
        "6f97afc0d14f749b3d66916e8cacb167758ba3bdd84b4c476e96c8d4e0d84340406892"
        "597ca0d53331adbed0a3b7cb52779d6f8ead9eb2c87a96b3a6b9cc567a9f94aac294ab"
        "c24b70985a7ca1b5c5d5b9c8d7aabccfd94849819bb7acbdd0c5d1dedb5253486f9744"
        "6c95da4943ae3832b7464fc40e0e3d659096acc3546d93c63c42796b88dce4eb815b74"
        "d02d1e9db2c7dc4a4a89a1bbc2393cd8413e9aafc5d01d1eb7c6d6da4142d43837c542"
        "48d3dce6687897d3322a829cb8d93438b2c2d3cd2120c4d1dd95abc3d6dfe8ca0e0cd8"
        "4c45e1e7eeb6c5d5cdd7e2d93c3c6c8bab5f5a73b14c56c6282b5b6386cd2826cf2829"
        "d5dee73e638c9f788acf3626686683436790d02724d32f2f7f728cde6261dd6864df6d"
        "6bc0353ecc3537dd545499617387637a864a5e8e697fd437388ca5be90a7c085687e8f"
        "a6bfd31d1e48648ce26665476d96d93137cd100fcb4944587195c02e34cd1619d94342"
        "7d7a95da4141da4343d63930d73c3399677bc3d0ddd22a2ad01f22d42f2d6d7d9dd124"
        "1de14b516384a6c64c52a64b58ab49514969915b7ea2c3636a734a5daa5255d9454468"
        "87a9bb3439be3b39dc353ecf26245e7396bc444c585d806081a46283a6dd615dd74a46"
        "dd675dd74138c90909dbe2ea6d8cac834d6489a2bcb15a65c34851b8636d54789e5679"
        "9ec26e78ae5762c20000d0dae4955c68dde4ecc0676fe0e6ed87a0bb4a7098446b948c"
        "a4bd8f6980aa39317d98b5c50b0d21f90400000000002c00000000480016000008ff00"
        "01081c48b0a0c18308132a5c583000c38710234a04e070a2c58b122b62dcc8d1a0c68e"
        "20377ec4c802038290080f24b08070e4453627d0b8406950828f160f0eba9c38228311"
        "09340df2f0704f8c4e83b4b2d98a82e79fb703b77c455a06204e33816226e1100140e5"
        "191f024d267c43a18005270a17241830e8e8c051fcb88d2b044f8e3860b0be914aa5ea"
        "53bf6d02cd40da5206800d01fe189d2b500744c217022204729c10028d220edc0a74b5"
        "2a0dbb6a98a8c1d160281d2f0dd7e8595b24f086010c5c007c3921d0c11726002e0df8"
        "9153c18f79057a5ce8d10000901a066c00b8b2a40365292704680610cd8a103b02ed15"
        "db706a8ea45d539471ff222450460a3e0a00207104a08100272978e4d9c7020500062c"
        "b0a5d84124170a2e9e9c018e00fa7c90c4112d3c01803a5a48e71141d058b78940ed94"
        "f30b20b1109385206d6c204c792b78915e17678cd14208000c80c0000a3651830561c4"
        "20401766bcb1441004a447003e044c13c28c00f8b186830d1164ca1d6968f28a1e7f54"
        "10c53a1590f38c31c8e062496b068011847a2a0ce442154a54e20e0060e8e001191444"
        "e0c6070ba8a0440e5c994001013b70501c00d01149d047740493cc14c3e8c24a16adf4"
        "d2082a9d4893491b7d08a4c3058401a00803035de14018393803050a4c5ca0861bf920"
        "20c01b176061c01000d4034415304c100e0010c88e5204a50f16248a368984b2073388"
        "00008a3cf100d08d39a5084442065bb597c4401390108109631c820e0058acc0001a33"
        "c0b0c02364ccf20e005e1c01c10a17b001c00c6b5132dd450f64d0040d0909000e470f"
        "78e0402deb5ef4c1315a1470d0016a2cc09104438e70101520bd00c4044119844d0c08"
        "71d0f0c40c7549f1c506895102c61c53d1051125941010003b").decode("hex")
    # Parse the whole capture (request + reply) and dump the result.
    x = http_session.parse(cap1)
    print x
    #print x.request.url
    #print x.request.params["Referer"]
    #print x.reply.params["Server"]
    #print "-" * 80
    #print x
| 42.025806
| 76
| 0.831133
| 285
| 6,514
| 18.922807
| 0.564912
| 0.005563
| 0.007232
| 0.010569
| 0.010013
| 0
| 0
| 0
| 0
| 0
| 0
| 0.527141
| 0.10915
| 6,514
| 154
| 77
| 42.298701
| 0.402206
| 0.03669
| 0
| 0.123894
| 0
| 0
| 0.73416
| 0.71237
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.017699
| null | null | 0.00885
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fffba420a65361a8e22ab68696ebf21e41c08970
| 2,438
|
py
|
Python
|
code/Classifier/TypeHierarchy.py
|
INK-USC/DS-RelationExtraction
|
eebcfa7fd2eda5bba92f3ef8158797cdf91e6981
|
[
"MIT"
] | 156
|
2018-10-09T09:01:42.000Z
|
2019-12-25T09:07:47.000Z
|
code/Classifier/TypeHierarchy.py
|
pengdada98/USC-DS-RelationExtraction
|
eebcfa7fd2eda5bba92f3ef8158797cdf91e6981
|
[
"MIT"
] | 10
|
2018-10-12T11:54:58.000Z
|
2019-10-11T03:29:20.000Z
|
code/Classifier/TypeHierarchy.py
|
pengdada98/USC-DS-RelationExtraction
|
eebcfa7fd2eda5bba92f3ef8158797cdf91e6981
|
[
"MIT"
] | 64
|
2016-11-04T16:03:03.000Z
|
2018-07-20T18:03:00.000Z
|
__author__ = 'xiang'
from collections import defaultdict
class TypeSet:
    """Flat collection of type ids loaded from a tab-separated file.

    Each input line is expected to look like ``<type>\t<tid>\t<freq>``;
    only the integer type id (``tid``) is collected into ``_root``.
    Unlike ``TypeHierarchy``, no parent/child links are ever populated,
    so every known type behaves as a root type.
    """

    def __init__(self, file_name, number_of_types):
        # number_of_types is kept for interface parity with TypeHierarchy;
        # it is not used here.
        self._type_hierarchy = {}  # type -> [parent type]; stays empty
        self._subtype_mapping = defaultdict(list)  # type -> [subtype]; stays empty
        self._root = set()  # root types (on 1-level)
        with open(file_name) as f:
            for line in f:
                try:
                    # Renamed to avoid shadowing the builtin `type`;
                    # only the id column is actually used.
                    _type_name, tid, _freq = line.strip('\r\n').split('\t')
                    self._root.add(int(tid))
                except Exception as e:
                    # Skip malformed lines but surface them for debugging.
                    print(e)

    def get_type_path(self, label):
        """Return the chain of types from the root down to `label`."""
        if label in self._type_hierarchy:  # label has super type
            path = [label]
            while label in self._type_hierarchy:
                path.append(self._type_hierarchy[label])
                label = self._type_hierarchy[label]
            path.reverse()
            return path
        else:  # label is the root type
            return [label]

    def get_subtypes(self, label):
        """Return the direct subtypes of `label`, or None if it has none."""
        if label in self._subtype_mapping:
            return self._subtype_mapping[label]
        else:
            return None
class TypeHierarchy:
    """Parent/child type hierarchy parsed from a tab-separated edge file."""

    def __init__(self, file_name, number_of_types):
        self._type_hierarchy = {}  # child type -> parent type
        self._subtype_mapping = defaultdict(list)  # parent type -> [child types]
        self._root = set()  # first pass: every id seen as a child
        with open(file_name) as source:
            for raw_line in source:
                fields = raw_line.strip('\r\n').split('\t')
                child, parent = int(fields[0]), int(fields[1])
                self._type_hierarchy[child] = parent
                self._subtype_mapping[parent].append(child)
                self._root.add(child)
        # Roots are exactly the ids that never appeared as a child.
        self._root = list(set(range(0, number_of_types)).difference(self._root))

    def get_type_path(self, label):
        """Return the chain of types from the root down to `label`."""
        if label not in self._type_hierarchy:
            # label is itself a root type
            return [label]
        path = [label]
        while label in self._type_hierarchy:
            label = self._type_hierarchy[label]
            path.append(label)
        return path[::-1]

    def get_subtypes(self, label):
        """Return the direct subtypes of `label`, or None if it has none."""
        return self._subtype_mapping[label] if label in self._subtype_mapping else None
| 36.939394
| 81
| 0.561936
| 298
| 2,438
| 4.348993
| 0.228188
| 0.067901
| 0.14429
| 0.101852
| 0.858025
| 0.844136
| 0.844136
| 0.811728
| 0.811728
| 0.811728
| 0
| 0.005549
| 0.334701
| 2,438
| 65
| 82
| 37.507692
| 0.793465
| 0.117719
| 0
| 0.736842
| 0
| 0
| 0.007948
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.017544
| 0.017544
| null | null | 0.017544
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0817f26cbe267a536ef8edf14cccdb6b3282f347
| 2,574
|
py
|
Python
|
forum/permissions.py
|
rafaelbantu/timtec
|
86c51b7440a044704ed33c3e752a6cf6b15ceae3
|
[
"BSD-3-Clause"
] | 21
|
2015-09-23T14:07:16.000Z
|
2022-02-18T01:35:18.000Z
|
forum/permissions.py
|
rafaelbantu/timtec
|
86c51b7440a044704ed33c3e752a6cf6b15ceae3
|
[
"BSD-3-Clause"
] | 178
|
2016-05-10T16:16:19.000Z
|
2021-12-15T20:21:21.000Z
|
forum/permissions.py
|
rafaelbantu/timtec
|
86c51b7440a044704ed33c3e752a6cf6b15ceae3
|
[
"BSD-3-Clause"
] | 18
|
2015-10-23T13:28:17.000Z
|
2021-09-22T13:08:28.000Z
|
from rest_framework import permissions
class EditQuestionPermission(permissions.BasePermission):
    """
    Custom permission to only allow owners of an object to edit it.
    """

    def has_object_permission(self, request, view, obj):
        # Read permissions are allowed to any request,
        # so we'll always allow GET, HEAD or OPTIONS requests.
        if request.method in permissions.SAFE_METHODS:
            return True
        elif request.user in obj.course.professors.all():
            return True
        elif request.user == obj.user:
            return True
        elif request.user.groups.filter(name="professors"):
            # FIXME remove this after implement tutor professor
            return True
        elif obj.course.professors.filter(user=request.user):
            # NOTE(review): filter() returns a queryset, so the `.role`
            # accesses below look like they expect a single object —
            # confirm whether `.get(user=...)`/`.first()` was intended.
            course_professor = obj.course.professors.filter(user=request.user)
            if course_professor.role == 'coordinator':
                return True
            elif course_professor.role == 'assistant':
                try:
                    question_user_class = obj.user.classes.get(course=obj.course)
                    if question_user_class in request.user.professor_classes.all():
                        return True
                except Exception:
                    # Narrowed from a bare `except:`; deny on lookup failure.
                    return False
        # Explicit denial (previously fell through returning None, which DRF
        # also treats as "deny").
        return False
class EditAnswerPermission(permissions.BasePermission):
    """
    Custom permission to only allow owners of an object to edit it.
    """

    def has_object_permission(self, request, view, obj):
        # Read permissions are allowed to any request,
        # so we'll always allow GET, HEAD or OPTIONS requests.
        if request.method in permissions.SAFE_METHODS:
            return True
        elif request.user in obj.question.course.professors.all():
            return True
        elif request.user == obj.user:
            return True
        elif request.user.groups.filter(name="professors"):
            # FIXME remove this after implement tutor professor
            return True
        elif obj.question.course.professors.filter(user=request.user):
            # NOTE(review): filter() returns a queryset, so the `.role`
            # accesses below look like they expect a single object —
            # confirm whether `.get(user=...)`/`.first()` was intended.
            course_professor = obj.question.course.professors.filter(user=request.user)
            if course_professor.role == 'coordinator':
                return True
            elif course_professor.role == 'assistant':
                try:
                    question_user_class = obj.user.classes.get(course=obj.question.course)
                    if question_user_class in request.user.professor_classes.all():
                        return True
                except Exception:
                    # Narrowed from a bare `except:`; deny on lookup failure.
                    return False
        # Explicit denial (previously fell through returning None, which DRF
        # also treats as "deny").
        return False
| 42.196721
| 90
| 0.614996
| 285
| 2,574
| 5.473684
| 0.242105
| 0.076923
| 0.089744
| 0.080769
| 0.934615
| 0.934615
| 0.934615
| 0.934615
| 0.924359
| 0.853846
| 0
| 0
| 0.311966
| 2,574
| 60
| 91
| 42.9
| 0.880858
| 0.164724
| 0
| 0.744186
| 0
| 0
| 0.028382
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 1
| 0.046512
| false
| 0
| 0.023256
| 0
| 0.44186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
083ccfe2eee0692d690ba486eb8b5c9d221d9c7a
| 7,947
|
py
|
Python
|
tests/utils/test_formjson.py
|
expobrain/mstrio-py
|
d389f3a1981eb7bb7a59df9fc6086e051c05ca74
|
[
"Apache-2.0"
] | null | null | null |
tests/utils/test_formjson.py
|
expobrain/mstrio-py
|
d389f3a1981eb7bb7a59df9fc6086e051c05ca74
|
[
"Apache-2.0"
] | 1
|
2019-09-30T07:02:06.000Z
|
2019-09-30T07:02:06.000Z
|
tests/utils/test_formjson.py
|
expobrain/mstrio-py
|
d389f3a1981eb7bb7a59df9fc6086e051c05ca74
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import pandas as pd
from mstrio.utils.formjson import formjson
def make_df():
    """Build the fixture DataFrame used by the formjson tests."""
    raw_data = {
        'id_int': [1, 2, 3, 4, 5],
        'id_str': ['1', '2', '3', '4', '5'],
        'first_name': ['Jason', 'Molly', 'Tina', 'Jake', 'Amy'],
        'last_name': ['Miller', 'Jacobson', "Turner", 'Milner', 'Cooze'],
        'age': [42, 52, 36, 24, 73],
        'weight': [100.22, 210.2, 175.1, 155.9, 199.9],
        'state': ["VA", "NC", "WY", "CA", "CA"],
        'salary': [50000, 100000, 75000, 85000, 250000],
    }
    # NOTE: 'first_name ' (with trailing space) is deliberate and is matched
    # by the expectations in TestFormjson; it does not exist in raw_data, so
    # that column comes out empty.
    column_order = ['id_int', 'id_str', 'first_name ', 'last_name',
                    'age', 'weight', 'state', 'salary']
    return pd.DataFrame(raw_data, columns=column_order)
class TestFormjson(unittest.TestCase):
    """Tests for formjson's split of DataFrame columns into column
    definitions, attributes, and metrics.

    Uses assertEqual throughout: assertEquals is a deprecated alias and
    was removed in Python 3.12.
    """

    def test_formjson(self):
        """Default call: string columns become attributes, numeric columns metrics."""
        expected_json = tuple((
            [{'dataType': 'INTEGER', 'name': 'id_int'},
             {'dataType': 'STRING', 'name': 'id_str'},
             {'dataType': 'STRING', 'name': 'first_name '},
             {'dataType': 'STRING', 'name': 'last_name'},
             {'dataType': 'INTEGER', 'name': 'age'},
             {'dataType': 'DOUBLE', 'name': 'weight'},
             {'dataType': 'STRING', 'name': 'state'},
             {'dataType': 'INTEGER', 'name': 'salary'}],
            [{'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.id_str'}]}],
              'name': 'id_str'},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.first_name '}]}],
              'name': 'first_name '},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.last_name'}]}],
              'name': 'last_name'},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.state'}]}],
              'name': 'state'}],
            [{'dataType': 'number',
              'expressions': [{'formula': 'TEST.id_int'}],
              'name': 'id_int'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.age'}],
              'name': 'age'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.weight'}],
              'name': 'weight'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.salary'}],
              'name': 'salary'}]))
        created_json = formjson(df=make_df(), table_name='TEST')
        self.assertEqual(created_json, expected_json)

    def test_formjson_with_attribute_override(self):
        """as_attributes moves the numeric id_int column into the attributes."""
        expected_json = tuple((
            [{'dataType': 'INTEGER', 'name': 'id_int'},
             {'dataType': 'STRING', 'name': 'id_str'},
             {'dataType': 'STRING', 'name': 'first_name '},
             {'dataType': 'STRING', 'name': 'last_name'},
             {'dataType': 'INTEGER', 'name': 'age'},
             {'dataType': 'DOUBLE', 'name': 'weight'},
             {'dataType': 'STRING', 'name': 'state'},
             {'dataType': 'INTEGER', 'name': 'salary'}],
            [{'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.id_int'}]}],
              'name': 'id_int'},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.id_str'}]}],
              'name': 'id_str'},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.first_name '}]}],
              'name': 'first_name '},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.last_name'}]}],
              'name': 'last_name'},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.state'}]}],
              'name': 'state'}],
            [{'dataType': 'number',
              'expressions': [{'formula': 'TEST.age'}],
              'name': 'age'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.weight'}],
              'name': 'weight'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.salary'}],
              'name': 'salary'}]))
        created_json = formjson(df=make_df(), table_name='TEST', as_attributes=['id_int'])
        self.assertEqual(created_json, expected_json)

    def test_formjson_with_metric_override(self):
        """as_metrics moves the string id_str column into the metrics."""
        expected_json = tuple((
            [{'dataType': 'INTEGER', 'name': 'id_int'},
             {'dataType': 'STRING', 'name': 'id_str'},
             {'dataType': 'STRING', 'name': 'first_name '},
             {'dataType': 'STRING', 'name': 'last_name'},
             {'dataType': 'INTEGER', 'name': 'age'},
             {'dataType': 'DOUBLE', 'name': 'weight'},
             {'dataType': 'STRING', 'name': 'state'},
             {'dataType': 'INTEGER', 'name': 'salary'}],
            [{'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.first_name '}]}],
              'name': 'first_name '},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.last_name'}]}],
              'name': 'last_name'},
             {'attributeForms': [{'category': 'ID', 'expressions': [{'formula': 'TEST.state'}]}],
              'name': 'state'}],
            [{'dataType': 'number',
              'expressions': [{'formula': 'TEST.id_int'}],
              'name': 'id_int'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.id_str'}],
              'name': 'id_str'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.age'}],
              'name': 'age'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.weight'}],
              'name': 'weight'},
             {'dataType': 'number',
              'expressions': [{'formula': 'TEST.salary'}],
              'name': 'salary'}]))
        created_json = formjson(df=make_df(), table_name='TEST', as_metrics=['id_str'])
        self.assertEqual(created_json, expected_json)
# Allow running this test module directly: `python test_formjson.py`.
if __name__ == '__main__':
    unittest.main()
| 61.130769
| 120
| 0.357619
| 519
| 7,947
| 5.310212
| 0.175337
| 0.156749
| 0.191582
| 0.152395
| 0.838897
| 0.835269
| 0.821118
| 0.821118
| 0.811684
| 0.811684
| 0
| 0.016183
| 0.471247
| 7,947
| 129
| 121
| 61.604651
| 0.639695
| 0
| 0
| 0.733333
| 0
| 0
| 0.278722
| 0
| 0
| 0
| 0
| 0
| 0.025
| 1
| 0.033333
| false
| 0
| 0.025
| 0
| 0.075
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f2662a2f6ab58afd29d8087ad08b21c9ee706e99
| 4,163
|
py
|
Python
|
tests/test_logging_slack.py
|
HazardDede/pnp
|
469ca17254dcca1a4eefe0dc5ac574692a9ab38e
|
[
"MIT"
] | 4
|
2018-10-07T11:32:00.000Z
|
2019-04-23T09:34:23.000Z
|
tests/test_logging_slack.py
|
HazardDede/pnp
|
469ca17254dcca1a4eefe0dc5ac574692a9ab38e
|
[
"MIT"
] | null | null | null |
tests/test_logging_slack.py
|
HazardDede/pnp
|
469ca17254dcca1a4eefe0dc5ac574692a9ab38e
|
[
"MIT"
] | 1
|
2019-08-12T19:56:10.000Z
|
2019-08-12T19:56:10.000Z
|
import logging
import sys
from logging import LogRecord
import pytest
from mock import patch, MagicMock
from pnp.logging import SlackHandler
@patch('pnp.logging.slacker.Slacker')
def test_logger_emit_for_smoke(slacker_mock):
    """Emitting a plain error record posts exactly one slack message."""
    slacker_mock.return_value = MagicMock()
    handler = SlackHandler(
        api_key='doesnt_matter',
        channel='pytest',
        fire_and_forget=False
    )
    record = LogRecord(
        name='pytest',
        level=logging.ERROR,
        pathname='doesnt_matter',
        lineno=42,
        msg='LogRecord from pytest',
        args=None,
        exc_info=None
    )
    handler.emit(record)
    expected_attachments = [{
        'color': 'danger',
        'fields': [{'title': 'LogRecord from pytest', 'short': False}]
    }]
    post_message = slacker_mock.return_value.chat.post_message
    post_message.assert_called_once_with(
        text=None,
        channel='#pytest',
        username=SlackHandler.DEFAULT_USERNAME,
        icon_url=None,
        icon_emoji=SlackHandler.DEFAULT_EMOJI,
        attachments=expected_attachments
    )
@patch('pnp.logging.slacker.Slacker')
def test_logger_emit_with_trace(slacker_mock):
    # A record carrying exc_info should get the exception text attached to
    # the slack message.
    slacker_mock.return_value = MagicMock()
    dut = SlackHandler(
        api_key='doesnt_matter',
        channel='pytest',
        fire_and_forget=False
    )
    # Shrink the attachment limit; the expected 'value' below is presumably
    # the truncation of the full traceback at this exact length — the test
    # breaks if this number changes.
    dut.MAX_ATTACHMENT_CHARS = 46
    try:
        raise Exception("EXCEPTION RAISED ON PURPOSE!")
    except Exception:
        # Emit from inside the handler so sys.exc_info() is populated.
        dut.emit(LogRecord(
            name='pytest',
            level=logging.ERROR,
            pathname='doesnt_matter',
            lineno=42,
            msg='LogRecord from pytest',
            args=None,
            exc_info=sys.exc_info()
        ))
    slacker_mock.return_value.chat.post_message.assert_called_once_with(
        text=None,
        channel='#pytest',
        username=SlackHandler.DEFAULT_USERNAME,
        icon_url=None,
        icon_emoji=SlackHandler.DEFAULT_EMOJI,
        attachments=[{
            'color': 'danger',
            'fields': [{
                'title': 'LogRecord from pytest',
                'short': False,
                'value': '```Exception: EXCEPTION RAISED ON PURPOSE!\n```'
            }]
        }]
    )
@patch('pnp.logging.slacker.Slacker')
def test_ping_users_existing_user(slacker_mock):
    """A configured ping user is resolved to its id and mentioned in the text."""
    # user_list = self.slacker.users.list().body['members']
    slacker_mock.return_value = MagicMock()
    member = {
        'id': 42,
        'name': 'pytest_user',
        'profile': {
            'real_name': 'PyTest',
            'display_name': 'PyTest'
        }
    }
    slacker_mock.return_value.users.list.return_value.body = {'members': [member]}
    handler = SlackHandler(
        api_key='doesnt_matter',
        channel='pytest',
        ping_users=['pytest_user'],
        fire_and_forget=False
    )
    assert handler.ping_user_ids == [42]
    record = LogRecord(
        name='pytest',
        level=logging.ERROR,
        pathname='doesnt_matter',
        lineno=42,
        msg='LogRecord from pytest',
        args=None,
        exc_info=None
    )
    handler.emit(record)
    post_message = slacker_mock.return_value.chat.post_message
    post_message.assert_called_once_with(
        text="<@42> ",
        channel='#pytest',
        username=SlackHandler.DEFAULT_USERNAME,
        icon_url=None,
        icon_emoji=SlackHandler.DEFAULT_EMOJI,
        attachments=[{
            'color': 'danger',
            'fields': [{'title': 'LogRecord from pytest', 'short': False}]
        }]
    )
@patch('pnp.logging.slacker.Slacker')
def test_ping_users_non_existing_user(slacker_mock):
    """Constructing the handler with an unknown ping user raises RuntimeError."""
    # user_list = self.slacker.users.list().body['members']
    slacker_mock.return_value = MagicMock()
    slacker_mock.return_value.users.list.return_value.body = {
        'members': [{
            'id': 42,
            'name': 'pytest_user',
            'profile': {
                'real_name': 'PyTest',
                'display_name': 'PyTest'
            }
        }]
    }
    with pytest.raises(RuntimeError) as exc:
        SlackHandler(
            api_key='doesnt_matter',
            channel='pytest',
            ping_users=['doesnotexist'],
            fire_and_forget=False
        )
    # Assert on the raised exception itself: str() on the ExceptionInfo
    # wrapper is deprecated in pytest and includes location noise.
    assert "User not found in Slack users list: doesnotexist" in str(exc.value)
| 27.753333
| 74
| 0.583714
| 431
| 4,163
| 5.399072
| 0.222738
| 0.061453
| 0.06575
| 0.085088
| 0.862484
| 0.813494
| 0.813494
| 0.813494
| 0.812205
| 0.720241
| 0
| 0.005476
| 0.298102
| 4,163
| 149
| 75
| 27.939597
| 0.790897
| 0.025703
| 0
| 0.717557
| 0
| 0
| 0.186035
| 0.026647
| 0
| 0
| 0
| 0
| 0.038168
| 1
| 0.030534
| false
| 0
| 0.045802
| 0
| 0.076336
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f2808d7e6674091a25e122ac7b207aa5f3f2ca26
| 2,909
|
py
|
Python
|
tests/test_socfaker_products_elastic_document.py
|
priamai/soc-faker
|
51b587f0cec52212136905280406e915006d2afc
|
[
"MIT"
] | 122
|
2020-02-21T16:06:54.000Z
|
2022-03-21T13:53:03.000Z
|
tests/test_socfaker_products_elastic_document.py
|
priamai/soc-faker
|
51b587f0cec52212136905280406e915006d2afc
|
[
"MIT"
] | 13
|
2020-01-29T16:37:05.000Z
|
2022-01-27T21:30:10.000Z
|
tests/test_socfaker_products_elastic_document.py
|
priamai/soc-faker
|
51b587f0cec52212136905280406e915006d2afc
|
[
"MIT"
] | 20
|
2020-04-10T11:59:29.000Z
|
2022-02-10T09:20:26.000Z
|
# Smoke tests for the soc-faker elastic document generator: each test asserts
# that one generated document (or one ECS field group on it) is truthy, i.e.
# that generation does not raise and does not come back empty.

def test_socfaker_products_elastic_document_get(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.get(count=1)

def test_socfaker_products_elastic_document_fields_agent(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.agent

def test_socfaker_products_elastic_document_fields_base(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.base

def test_socfaker_products_elastic_document_fields_client(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.client

def test_socfaker_products_elastic_document_fields_container(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.container

def test_socfaker_products_elastic_document_fields_destination(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.destination

def test_socfaker_products_elastic_document_fields_dll(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.dll

def test_socfaker_products_elastic_document_fields_dns(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.dns

def test_socfaker_products_elastic_document_fields_event(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.event

def test_socfaker_products_elastic_document_fields_file(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.file

def test_socfaker_products_elastic_document_fields_host(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.host

def test_socfaker_products_elastic_document_fields_http(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.http

def test_socfaker_products_elastic_document_fields_network(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.network

def test_socfaker_products_elastic_document_fields_organization(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.organization

def test_socfaker_products_elastic_document_fields_package(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.package

def test_socfaker_products_elastic_document_fields_registry(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.registry

def test_socfaker_products_elastic_document_fields_server(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.server

def test_socfaker_products_elastic_document_fields_source(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.source

def test_socfaker_products_elastic_document_fields_cloud(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.cloud

def test_socfaker_products_elastic_document_fields_code_signature(socfaker_fixture):
    assert socfaker_fixture.products.elastic.document.fields.code_signature
| 48.483333
| 84
| 0.888622
| 362
| 2,909
| 6.696133
| 0.088398
| 0.247525
| 0.379538
| 0.45462
| 0.99505
| 0.905116
| 0.878713
| 0.533828
| 0.509488
| 0
| 0
| 0.000364
| 0.055689
| 2,909
| 59
| 85
| 49.305085
| 0.882053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4b8631fa72d9b7a0059dc4252688832dbac68aca
| 134
|
py
|
Python
|
__init__.py
|
njchoma/DGAPN
|
a0fa29542a94dc6db250bd4f2cb0ac9f2d4c06d2
|
[
"MIT"
] | 21
|
2021-06-03T19:23:36.000Z
|
2022-03-01T18:39:30.000Z
|
__init__.py
|
yulun-rayn/DGAPN
|
6d87376fa933a0a5efff180ebe1fe5772a060987
|
[
"MIT"
] | null | null | null |
__init__.py
|
yulun-rayn/DGAPN
|
6d87376fa933a0a5efff180ebe1fe5772a060987
|
[
"MIT"
] | 5
|
2020-09-24T19:59:47.000Z
|
2021-05-28T23:56:29.000Z
|
from .src.dgapn.DGAPN import DGAPN, init_DGAPN, save_DGAPN, load_DGAPN
__all__ = ['DGAPN', 'init_DGAPN', 'save_DGAPN', 'load_DGAPN']
| 33.5
| 70
| 0.746269
| 20
| 134
| 4.5
| 0.4
| 0.2
| 0.311111
| 0.4
| 0.711111
| 0.711111
| 0.711111
| 0
| 0
| 0
| 0
| 0
| 0.104478
| 134
| 3
| 71
| 44.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.261194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
29fe2ca55bf471426b4fdf77a3f34815f376dffc
| 123
|
py
|
Python
|
x_file_accel_redirects/conf.py
|
42cc/django-x-file-accel
|
526901214586d82ffc8684e241fb5550293c5552
|
[
"BSD-3-Clause"
] | 1
|
2019-02-10T12:32:57.000Z
|
2019-02-10T12:32:57.000Z
|
x_file_accel_redirects/conf.py
|
42cc/django-x-file-accel
|
526901214586d82ffc8684e241fb5550293c5552
|
[
"BSD-3-Clause"
] | null | null | null |
x_file_accel_redirects/conf.py
|
42cc/django-x-file-accel
|
526901214586d82ffc8684e241fb5550293c5552
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.conf import settings

# Ensure settings.X_FILE_ACCEL always exists, defaulting to False when the
# project does not define it.
settings.X_FILE_ACCEL = getattr(settings, 'X_FILE_ACCEL', False)
| 24.6
| 64
| 0.731707
| 18
| 123
| 4.777778
| 0.722222
| 0.209302
| 0.302326
| 0.418605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.121951
| 123
| 4
| 65
| 30.75
| 0.787037
| 0.170732
| 0
| 0
| 0
| 0
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
8a01cd7021166b71078be9851ccfb5c3311e609a
| 2,639
|
py
|
Python
|
tests/contracts/checker.py
|
cffbots/howfairis
|
008552b7266e229bd38553631d7dfe3554df18b2
|
[
"Apache-2.0"
] | 27
|
2020-09-10T10:04:56.000Z
|
2022-02-07T23:24:13.000Z
|
tests/contracts/checker.py
|
cffbots/howfairis
|
008552b7266e229bd38553631d7dfe3554df18b2
|
[
"Apache-2.0"
] | 297
|
2020-09-07T14:10:08.000Z
|
2022-02-18T09:46:30.000Z
|
tests/contracts/checker.py
|
cffbots/howfairis
|
008552b7266e229bd38553631d7dfe3554df18b2
|
[
"Apache-2.0"
] | 6
|
2020-09-10T12:58:37.000Z
|
2022-03-11T10:17:21.000Z
|
from abc import ABC, abstractmethod
from requests_mock.mocker import Mocker
class Contract(ABC):
    """Abstract test contract for checker test suites.

    Every concrete suite must implement one test per check below, so that
    coverage stays uniform across the different repository scenarios.
    Tests taking `captured_output` additionally inspect printed output;
    all others only exercise the check through the mocked HTTP context.
    """

    @abstractmethod
    def test_check_checklist(self, mocked_context: Mocker, captured_output):
        pass

    @abstractmethod
    def test_check_citation(self, mocked_context: Mocker, captured_output):
        pass

    @abstractmethod
    def test_check_license(self, mocked_context: Mocker, captured_output):
        pass

    @abstractmethod
    def test_check_registry(self, mocked_context: Mocker, captured_output):
        pass

    @abstractmethod
    def test_check_repository(self, mocked_context: Mocker, captured_output):
        pass

    @abstractmethod
    def test_compliance(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_ascl_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_bintray_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_citation_file(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_citationcff_file(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_codemeta_file(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_conda_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_core_infrastructures_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_cran_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_crates_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_license(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_maven_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_npm_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_open_repository(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_pypi_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_rsd_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_zenodo_badge(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_has_zenodo_metadata_file(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_is_on_github_marketplace(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_readme(self, mocked_context: Mocker):
        pass

    @abstractmethod
    def test_repo(self, mocked_context: Mocker):
        pass
| 23.990909
| 77
| 0.703676
| 301
| 2,639
| 5.833887
| 0.162791
| 0.251708
| 0.310934
| 0.340547
| 0.845672
| 0.810934
| 0.810934
| 0.810934
| 0.756264
| 0.639522
| 0
| 0
| 0.233422
| 2,639
| 109
| 78
| 24.211009
| 0.868018
| 0
| 0
| 0.641975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.320988
| false
| 0.320988
| 0.024691
| 0
| 0.358025
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
8a1b887d871ec4912f4c8dc1e0a76834b0236607
| 262,430
|
py
|
Python
|
sdk/python/pulumi_gcp/osconfig/outputs.py
|
pjbizon/pulumi-gcp
|
0d09cbc1dcf50093a177531f7596c27db11a2e58
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/osconfig/outputs.py
|
pjbizon/pulumi-gcp
|
0d09cbc1dcf50093a177531f7596c27db11a2e58
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/osconfig/outputs.py
|
pjbizon/pulumi-gcp
|
0d09cbc1dcf50093a177531f7596c27db11a2e58
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
# Public API of this generated module: the output types produced by the
# Pulumi Terraform Bridge for the GCP OS Config resources below.
__all__ = [
    'GuestPoliciesAssignment',
    'GuestPoliciesAssignmentGroupLabel',
    'GuestPoliciesAssignmentOsType',
    'GuestPoliciesPackage',
    'GuestPoliciesPackageRepository',
    'GuestPoliciesPackageRepositoryApt',
    'GuestPoliciesPackageRepositoryGoo',
    'GuestPoliciesPackageRepositoryYum',
    'GuestPoliciesPackageRepositoryZypper',
    'GuestPoliciesRecipe',
    'GuestPoliciesRecipeArtifact',
    'GuestPoliciesRecipeArtifactGcs',
    'GuestPoliciesRecipeArtifactRemote',
    'GuestPoliciesRecipeInstallStep',
    'GuestPoliciesRecipeInstallStepArchiveExtraction',
    'GuestPoliciesRecipeInstallStepDpkgInstallation',
    'GuestPoliciesRecipeInstallStepFileCopy',
    'GuestPoliciesRecipeInstallStepFileExec',
    'GuestPoliciesRecipeInstallStepMsiInstallation',
    'GuestPoliciesRecipeInstallStepRpmInstallation',
    'GuestPoliciesRecipeInstallStepScriptRun',
    'GuestPoliciesRecipeUpdateStep',
    'GuestPoliciesRecipeUpdateStepArchiveExtraction',
    'GuestPoliciesRecipeUpdateStepDpkgInstallation',
    'GuestPoliciesRecipeUpdateStepFileCopy',
    'GuestPoliciesRecipeUpdateStepFileExec',
    'GuestPoliciesRecipeUpdateStepMsiInstallation',
    'GuestPoliciesRecipeUpdateStepRpmInstallation',
    'GuestPoliciesRecipeUpdateStepScriptRun',
    'OsPolicyAssignmentInstanceFilter',
    'OsPolicyAssignmentInstanceFilterExclusionLabel',
    'OsPolicyAssignmentInstanceFilterInclusionLabel',
    'OsPolicyAssignmentInstanceFilterInventory',
    'OsPolicyAssignmentOsPolicy',
    'OsPolicyAssignmentOsPolicyResourceGroup',
    'OsPolicyAssignmentOsPolicyResourceGroupInventoryFilter',
    'OsPolicyAssignmentOsPolicyResourceGroupResource',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExec',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileGcs',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileGcs',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceFile',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileGcs',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkg',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgApt',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceGcs',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgGooget',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsi',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceGcs',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceGcs',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgYum',
    'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgZypper',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceRepository',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryGoo',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum',
    'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper',
    'OsPolicyAssignmentRollout',
    'OsPolicyAssignmentRolloutDisruptionBudget',
    'PatchDeploymentInstanceFilter',
    'PatchDeploymentInstanceFilterGroupLabel',
    'PatchDeploymentOneTimeSchedule',
    'PatchDeploymentPatchConfig',
    'PatchDeploymentPatchConfigApt',
    'PatchDeploymentPatchConfigGoo',
    'PatchDeploymentPatchConfigPostStep',
    'PatchDeploymentPatchConfigPostStepLinuxExecStepConfig',
    'PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject',
    'PatchDeploymentPatchConfigPostStepWindowsExecStepConfig',
    'PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject',
    'PatchDeploymentPatchConfigPreStep',
    'PatchDeploymentPatchConfigPreStepLinuxExecStepConfig',
    'PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject',
    'PatchDeploymentPatchConfigPreStepWindowsExecStepConfig',
    'PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject',
    'PatchDeploymentPatchConfigWindowsUpdate',
    'PatchDeploymentPatchConfigYum',
    'PatchDeploymentPatchConfigZypper',
    'PatchDeploymentRecurringSchedule',
    'PatchDeploymentRecurringScheduleMonthly',
    'PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth',
    'PatchDeploymentRecurringScheduleTimeOfDay',
    'PatchDeploymentRecurringScheduleTimeZone',
    'PatchDeploymentRecurringScheduleWeekly',
    'PatchDeploymentRollout',
    'PatchDeploymentRolloutDisruptionBudget',
]
@pulumi.output_type
class GuestPoliciesAssignment(dict):
    """Targeting rules selecting which VM instances a guest policy applies to.

    Generated output type (tfgen); dict keys use the camelCase API names,
    while the snake_case property getters are the supported access path.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to the snake_case property that should be
        # used instead; warn on dict-style access with a raw API key.
        suggest = None
        if key == "groupLabels":
            suggest = "group_labels"
        elif key == "instanceNamePrefixes":
            suggest = "instance_name_prefixes"
        elif key == "osTypes":
            suggest = "os_types"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesAssignment. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GuestPoliciesAssignment.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GuestPoliciesAssignment.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 group_labels: Optional[Sequence['outputs.GuestPoliciesAssignmentGroupLabel']] = None,
                 instance_name_prefixes: Optional[Sequence[str]] = None,
                 instances: Optional[Sequence[str]] = None,
                 os_types: Optional[Sequence['outputs.GuestPoliciesAssignmentOsType']] = None,
                 zones: Optional[Sequence[str]] = None):
        """
        :param Sequence['GuestPoliciesAssignmentGroupLabelArgs'] group_labels: Targets instances matching at least one of these label sets. This allows an assignment to target disparate groups,
               for example "env=prod or env=staging".
               Structure is documented below.
        :param Sequence[str] instance_name_prefixes: Targets VM instances whose name starts with one of these prefixes.
               Like labels, this is another way to group VM instances when targeting configs,
               for example prefix="prod-".
               Only supported for project-level policies.
        :param Sequence[str] instances: Targets any of the instances specified. Instances are specified by their URI in the form
               zones/[ZONE]/instances/[INSTANCE_NAME].
               Instance targeting is uncommon and is supported to facilitate the management of changes
               by the instance or to target specific VM instances for development and testing.
               Only supported for project-level policies and must reference instances within this project.
        :param Sequence['GuestPoliciesAssignmentOsTypeArgs'] os_types: Targets VM instances matching at least one of the following OS types.
               VM instances must match all supplied criteria for a given OsType to be included.
               Structure is documented below.
        :param Sequence[str] zones: Targets instances in any of these zones. Leave empty to target instances in any zone.
               Zonal targeting is uncommon and is supported to facilitate the management of changes by zone.
        """
        if group_labels is not None:
            pulumi.set(__self__, "group_labels", group_labels)
        if instance_name_prefixes is not None:
            pulumi.set(__self__, "instance_name_prefixes", instance_name_prefixes)
        if instances is not None:
            pulumi.set(__self__, "instances", instances)
        if os_types is not None:
            pulumi.set(__self__, "os_types", os_types)
        if zones is not None:
            pulumi.set(__self__, "zones", zones)
    @property
    @pulumi.getter(name="groupLabels")
    def group_labels(self) -> Optional[Sequence['outputs.GuestPoliciesAssignmentGroupLabel']]:
        """
        Targets instances matching at least one of these label sets. This allows an assignment to target disparate groups,
        for example "env=prod or env=staging".
        Structure is documented below.
        """
        return pulumi.get(self, "group_labels")
    @property
    @pulumi.getter(name="instanceNamePrefixes")
    def instance_name_prefixes(self) -> Optional[Sequence[str]]:
        """
        Targets VM instances whose name starts with one of these prefixes.
        Like labels, this is another way to group VM instances when targeting configs,
        for example prefix="prod-".
        Only supported for project-level policies.
        """
        return pulumi.get(self, "instance_name_prefixes")
    @property
    @pulumi.getter
    def instances(self) -> Optional[Sequence[str]]:
        """
        Targets any of the instances specified. Instances are specified by their URI in the form
        zones/[ZONE]/instances/[INSTANCE_NAME].
        Instance targeting is uncommon and is supported to facilitate the management of changes
        by the instance or to target specific VM instances for development and testing.
        Only supported for project-level policies and must reference instances within this project.
        """
        return pulumi.get(self, "instances")
    @property
    @pulumi.getter(name="osTypes")
    def os_types(self) -> Optional[Sequence['outputs.GuestPoliciesAssignmentOsType']]:
        """
        Targets VM instances matching at least one of the following OS types.
        VM instances must match all supplied criteria for a given OsType to be included.
        Structure is documented below.
        """
        return pulumi.get(self, "os_types")
    @property
    @pulumi.getter
    def zones(self) -> Optional[Sequence[str]]:
        """
        Targets instances in any of these zones. Leave empty to target instances in any zone.
        Zonal targeting is uncommon and is supported to facilitate the management of changes by zone.
        """
        return pulumi.get(self, "zones")
@pulumi.output_type
class GuestPoliciesAssignmentGroupLabel(dict):
    """One label set; all labels must be present on an instance to match."""
    def __init__(__self__, *,
                 labels: Mapping[str, str]):
        """
        :param Mapping[str, str] labels: Google Compute Engine instance labels that must be present for an instance to be included in this assignment group.
        """
        pulumi.set(__self__, "labels", labels)
    @property
    @pulumi.getter
    def labels(self) -> Mapping[str, str]:
        """
        Google Compute Engine instance labels that must be present for an instance to be included in this assignment group.
        """
        return pulumi.get(self, "labels")
@pulumi.output_type
class GuestPoliciesAssignmentOsType(dict):
    """OS-based targeting criteria (architecture, short name, version)."""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase API key is used instead of the snake_case
        # property getter.
        suggest = None
        if key == "osArchitecture":
            suggest = "os_architecture"
        elif key == "osShortName":
            suggest = "os_short_name"
        elif key == "osVersion":
            suggest = "os_version"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesAssignmentOsType. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GuestPoliciesAssignmentOsType.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GuestPoliciesAssignmentOsType.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 os_architecture: Optional[str] = None,
                 os_short_name: Optional[str] = None,
                 os_version: Optional[str] = None):
        """
        :param str os_architecture: Targets VM instances with OS Inventory enabled and having the following OS architecture.
        :param str os_short_name: Targets VM instances with OS Inventory enabled and having the following OS short name, for example "debian" or "windows".
        :param str os_version: Targets VM instances with OS Inventory enabled and having the following following OS version.
        """
        if os_architecture is not None:
            pulumi.set(__self__, "os_architecture", os_architecture)
        if os_short_name is not None:
            pulumi.set(__self__, "os_short_name", os_short_name)
        if os_version is not None:
            pulumi.set(__self__, "os_version", os_version)
    @property
    @pulumi.getter(name="osArchitecture")
    def os_architecture(self) -> Optional[str]:
        """
        Targets VM instances with OS Inventory enabled and having the following OS architecture.
        """
        return pulumi.get(self, "os_architecture")
    @property
    @pulumi.getter(name="osShortName")
    def os_short_name(self) -> Optional[str]:
        """
        Targets VM instances with OS Inventory enabled and having the following OS short name, for example "debian" or "windows".
        """
        return pulumi.get(self, "os_short_name")
    @property
    @pulumi.getter(name="osVersion")
    def os_version(self) -> Optional[str]:
        """
        Targets VM instances with OS Inventory enabled and having the following following OS version.
        """
        return pulumi.get(self, "os_version")
@pulumi.output_type
class GuestPoliciesPackage(dict):
    """A package a guest policy manages: name, desired state, and manager."""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase API key is used instead of the snake_case
        # property getter.
        suggest = None
        if key == "desiredState":
            suggest = "desired_state"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesPackage. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GuestPoliciesPackage.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GuestPoliciesPackage.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 name: str,
                 desired_state: Optional[str] = None,
                 manager: Optional[str] = None):
        """
        :param str name: Unique identifier for the recipe. Only one recipe with a given name is installed on an instance.
               Names are also used to identify resources which helps to determine whether guest policies have conflicts.
               This means that requests to create multiple recipes with the same name and version are rejected since they
               could potentially have conflicting assignments.
        :param str desired_state: Default is INSTALLED. The desired state the agent should maintain for this recipe.
               INSTALLED: The software recipe is installed on the instance but won't be updated to new versions.
               INSTALLED_KEEP_UPDATED: The software recipe is installed on the instance. The recipe is updated to a higher version,
               if a higher version of the recipe is assigned to this instance.
               REMOVE: Remove is unsupported for software recipes and attempts to create or update a recipe to the REMOVE state is rejected.
               Default value is `INSTALLED`.
               Possible values are `INSTALLED`, `UPDATED`, and `REMOVED`.
        :param str manager: Type of package manager that can be used to install this package. If a system does not have the package manager,
               the package is not installed or removed no error message is returned. By default, or if you specify ANY,
               the agent attempts to install and remove this package using the default package manager.
               This is useful when creating a policy that applies to different types of systems.
               The default behavior is ANY.
               Default value is `ANY`.
               Possible values are `ANY`, `APT`, `YUM`, `ZYPPER`, and `GOO`.
        """
        pulumi.set(__self__, "name", name)
        if desired_state is not None:
            pulumi.set(__self__, "desired_state", desired_state)
        if manager is not None:
            pulumi.set(__self__, "manager", manager)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Unique identifier for the recipe. Only one recipe with a given name is installed on an instance.
        Names are also used to identify resources which helps to determine whether guest policies have conflicts.
        This means that requests to create multiple recipes with the same name and version are rejected since they
        could potentially have conflicting assignments.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="desiredState")
    def desired_state(self) -> Optional[str]:
        """
        Default is INSTALLED. The desired state the agent should maintain for this recipe.
        INSTALLED: The software recipe is installed on the instance but won't be updated to new versions.
        INSTALLED_KEEP_UPDATED: The software recipe is installed on the instance. The recipe is updated to a higher version,
        if a higher version of the recipe is assigned to this instance.
        REMOVE: Remove is unsupported for software recipes and attempts to create or update a recipe to the REMOVE state is rejected.
        Default value is `INSTALLED`.
        Possible values are `INSTALLED`, `UPDATED`, and `REMOVED`.
        """
        return pulumi.get(self, "desired_state")
    @property
    @pulumi.getter
    def manager(self) -> Optional[str]:
        """
        Type of package manager that can be used to install this package. If a system does not have the package manager,
        the package is not installed or removed no error message is returned. By default, or if you specify ANY,
        the agent attempts to install and remove this package using the default package manager.
        This is useful when creating a policy that applies to different types of systems.
        The default behavior is ANY.
        Default value is `ANY`.
        Possible values are `ANY`, `APT`, `YUM`, `ZYPPER`, and `GOO`.
        """
        return pulumi.get(self, "manager")
@pulumi.output_type
class GuestPoliciesPackageRepository(dict):
    """A package repository definition; exactly one flavor (apt/goo/yum/zypper) is set."""
    def __init__(__self__, *,
                 apt: Optional['outputs.GuestPoliciesPackageRepositoryApt'] = None,
                 goo: Optional['outputs.GuestPoliciesPackageRepositoryGoo'] = None,
                 yum: Optional['outputs.GuestPoliciesPackageRepositoryYum'] = None,
                 zypper: Optional['outputs.GuestPoliciesPackageRepositoryZypper'] = None):
        """
        :param 'GuestPoliciesPackageRepositoryAptArgs' apt: An Apt Repository.
               Structure is documented below.
        :param 'GuestPoliciesPackageRepositoryGooArgs' goo: A Goo Repository.
               Structure is documented below.
        :param 'GuestPoliciesPackageRepositoryYumArgs' yum: A Yum Repository.
               Structure is documented below.
        :param 'GuestPoliciesPackageRepositoryZypperArgs' zypper: A Zypper Repository.
               Structure is documented below.
        """
        if apt is not None:
            pulumi.set(__self__, "apt", apt)
        if goo is not None:
            pulumi.set(__self__, "goo", goo)
        if yum is not None:
            pulumi.set(__self__, "yum", yum)
        if zypper is not None:
            pulumi.set(__self__, "zypper", zypper)
    @property
    @pulumi.getter
    def apt(self) -> Optional['outputs.GuestPoliciesPackageRepositoryApt']:
        """
        An Apt Repository.
        Structure is documented below.
        """
        return pulumi.get(self, "apt")
    @property
    @pulumi.getter
    def goo(self) -> Optional['outputs.GuestPoliciesPackageRepositoryGoo']:
        """
        A Goo Repository.
        Structure is documented below.
        """
        return pulumi.get(self, "goo")
    @property
    @pulumi.getter
    def yum(self) -> Optional['outputs.GuestPoliciesPackageRepositoryYum']:
        """
        A Yum Repository.
        Structure is documented below.
        """
        return pulumi.get(self, "yum")
    @property
    @pulumi.getter
    def zypper(self) -> Optional['outputs.GuestPoliciesPackageRepositoryZypper']:
        """
        A Zypper Repository.
        Structure is documented below.
        """
        return pulumi.get(self, "zypper")
@pulumi.output_type
class GuestPoliciesPackageRepositoryApt(dict):
    """An APT repository: components, distribution, URI, archive type, GPG key."""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase API key is used instead of the snake_case
        # property getter.
        suggest = None
        if key == "archiveType":
            suggest = "archive_type"
        elif key == "gpgKey":
            suggest = "gpg_key"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesPackageRepositoryApt. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GuestPoliciesPackageRepositoryApt.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GuestPoliciesPackageRepositoryApt.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 components: Sequence[str],
                 distribution: str,
                 uri: str,
                 archive_type: Optional[str] = None,
                 gpg_key: Optional[str] = None):
        """
        :param Sequence[str] components: List of components for this repository. Must contain at least one item.
        :param str distribution: Distribution of this repository.
        :param str uri: URI from which to fetch the object. It should contain both the protocol and path following the format {protocol}://{location}.
        :param str archive_type: Type of archive files in this repository. The default behavior is DEB.
               Default value is `DEB`.
               Possible values are `DEB` and `DEB_SRC`.
        :param str gpg_key: URI of the key file for this repository. The agent maintains a keyring at
               /etc/apt/trusted.gpg.d/osconfig_agent_managed.gpg containing all the keys in any applied guest policy.
        """
        pulumi.set(__self__, "components", components)
        pulumi.set(__self__, "distribution", distribution)
        pulumi.set(__self__, "uri", uri)
        if archive_type is not None:
            pulumi.set(__self__, "archive_type", archive_type)
        if gpg_key is not None:
            pulumi.set(__self__, "gpg_key", gpg_key)
    @property
    @pulumi.getter
    def components(self) -> Sequence[str]:
        """
        List of components for this repository. Must contain at least one item.
        """
        return pulumi.get(self, "components")
    @property
    @pulumi.getter
    def distribution(self) -> str:
        """
        Distribution of this repository.
        """
        return pulumi.get(self, "distribution")
    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        URI from which to fetch the object. It should contain both the protocol and path following the format {protocol}://{location}.
        """
        return pulumi.get(self, "uri")
    @property
    @pulumi.getter(name="archiveType")
    def archive_type(self) -> Optional[str]:
        """
        Type of archive files in this repository. The default behavior is DEB.
        Default value is `DEB`.
        Possible values are `DEB` and `DEB_SRC`.
        """
        return pulumi.get(self, "archive_type")
    @property
    @pulumi.getter(name="gpgKey")
    def gpg_key(self) -> Optional[str]:
        """
        URI of the key file for this repository. The agent maintains a keyring at
        /etc/apt/trusted.gpg.d/osconfig_agent_managed.gpg containing all the keys in any applied guest policy.
        """
        return pulumi.get(self, "gpg_key")
@pulumi.output_type
class GuestPoliciesPackageRepositoryGoo(dict):
    """A Goo repository, identified by name and URL."""
    def __init__(__self__, *,
                 name: str,
                 url: str):
        """
        :param str name: Unique identifier for the recipe. Only one recipe with a given name is installed on an instance.
               Names are also used to identify resources which helps to determine whether guest policies have conflicts.
               This means that requests to create multiple recipes with the same name and version are rejected since they
               could potentially have conflicting assignments.
        :param str url: The url of the repository.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "url", url)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Unique identifier for the recipe. Only one recipe with a given name is installed on an instance.
        Names are also used to identify resources which helps to determine whether guest policies have conflicts.
        This means that requests to create multiple recipes with the same name and version are rejected since they
        could potentially have conflicting assignments.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def url(self) -> str:
        """
        The url of the repository.
        """
        return pulumi.get(self, "url")
@pulumi.output_type
class GuestPoliciesPackageRepositoryYum(dict):
    """A Yum repository: base URL, id, display name, and GPG key URIs."""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase API key is used instead of the snake_case
        # property getter.
        suggest = None
        if key == "baseUrl":
            suggest = "base_url"
        elif key == "displayName":
            suggest = "display_name"
        elif key == "gpgKeys":
            suggest = "gpg_keys"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesPackageRepositoryYum. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GuestPoliciesPackageRepositoryYum.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GuestPoliciesPackageRepositoryYum.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 base_url: str,
                 id: str,
                 display_name: Optional[str] = None,
                 gpg_keys: Optional[Sequence[str]] = None):
        """
        :param str base_url: The location of the repository directory.
        :param str id: Id of the artifact, which the installation and update steps of this recipe can reference.
               Artifacts in a recipe cannot have the same id.
        :param str display_name: The display name of the repository.
        :param Sequence[str] gpg_keys: URIs of GPG keys.
        """
        pulumi.set(__self__, "base_url", base_url)
        pulumi.set(__self__, "id", id)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if gpg_keys is not None:
            pulumi.set(__self__, "gpg_keys", gpg_keys)
    @property
    @pulumi.getter(name="baseUrl")
    def base_url(self) -> str:
        """
        The location of the repository directory.
        """
        return pulumi.get(self, "base_url")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Id of the artifact, which the installation and update steps of this recipe can reference.
        Artifacts in a recipe cannot have the same id.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        The display name of the repository.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="gpgKeys")
    def gpg_keys(self) -> Optional[Sequence[str]]:
        """
        URIs of GPG keys.
        """
        return pulumi.get(self, "gpg_keys")
@pulumi.output_type
class GuestPoliciesPackageRepositoryZypper(dict):
    """A Zypper repository: base URL, id, display name, and GPG key URIs."""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase API key is used instead of the snake_case
        # property getter.
        suggest = None
        if key == "baseUrl":
            suggest = "base_url"
        elif key == "displayName":
            suggest = "display_name"
        elif key == "gpgKeys":
            suggest = "gpg_keys"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesPackageRepositoryZypper. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GuestPoliciesPackageRepositoryZypper.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GuestPoliciesPackageRepositoryZypper.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 base_url: str,
                 id: str,
                 display_name: Optional[str] = None,
                 gpg_keys: Optional[Sequence[str]] = None):
        """
        :param str base_url: The location of the repository directory.
        :param str id: Id of the artifact, which the installation and update steps of this recipe can reference.
               Artifacts in a recipe cannot have the same id.
        :param str display_name: The display name of the repository.
        :param Sequence[str] gpg_keys: URIs of GPG keys.
        """
        pulumi.set(__self__, "base_url", base_url)
        pulumi.set(__self__, "id", id)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if gpg_keys is not None:
            pulumi.set(__self__, "gpg_keys", gpg_keys)
    @property
    @pulumi.getter(name="baseUrl")
    def base_url(self) -> str:
        """
        The location of the repository directory.
        """
        return pulumi.get(self, "base_url")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Id of the artifact, which the installation and update steps of this recipe can reference.
        Artifacts in a recipe cannot have the same id.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        The display name of the repository.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="gpgKeys")
    def gpg_keys(self) -> Optional[Sequence[str]]:
        """
        URIs of GPG keys.
        """
        return pulumi.get(self, "gpg_keys")
@pulumi.output_type
class GuestPoliciesRecipe(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "desiredState":
suggest = "desired_state"
elif key == "installSteps":
suggest = "install_steps"
elif key == "updateSteps":
suggest = "update_steps"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipe. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GuestPoliciesRecipe.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GuestPoliciesRecipe.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
name: str,
artifacts: Optional[Sequence['outputs.GuestPoliciesRecipeArtifact']] = None,
desired_state: Optional[str] = None,
install_steps: Optional[Sequence['outputs.GuestPoliciesRecipeInstallStep']] = None,
update_steps: Optional[Sequence['outputs.GuestPoliciesRecipeUpdateStep']] = None,
version: Optional[str] = None):
"""
:param str name: Unique identifier for the recipe. Only one recipe with a given name is installed on an instance.
Names are also used to identify resources which helps to determine whether guest policies have conflicts.
This means that requests to create multiple recipes with the same name and version are rejected since they
could potentially have conflicting assignments.
:param Sequence['GuestPoliciesRecipeArtifactArgs'] artifacts: Resources available to be used in the steps in the recipe.
Structure is documented below.
:param str desired_state: Default is INSTALLED. The desired state the agent should maintain for this recipe.
INSTALLED: The software recipe is installed on the instance but won't be updated to new versions.
INSTALLED_KEEP_UPDATED: The software recipe is installed on the instance. The recipe is updated to a higher version,
if a higher version of the recipe is assigned to this instance.
REMOVE: Remove is unsupported for software recipes and attempts to create or update a recipe to the REMOVE state is rejected.
Default value is `INSTALLED`.
Possible values are `INSTALLED`, `UPDATED`, and `REMOVED`.
:param Sequence['GuestPoliciesRecipeInstallStepArgs'] install_steps: Actions to be taken for installing this recipe. On failure it stops executing steps and does not attempt another installation.
Any steps taken (including partially completed steps) are not rolled back.
Structure is documented below.
:param Sequence['GuestPoliciesRecipeUpdateStepArgs'] update_steps: Actions to be taken for updating this recipe. On failure it stops executing steps and does not attempt another update for this recipe.
Any steps taken (including partially completed steps) are not rolled back.
Structure is documented below.
:param str version: The version of this software recipe. Version can be up to 4 period separated numbers (e.g. 12.34.56.78).
"""
pulumi.set(__self__, "name", name)
if artifacts is not None:
pulumi.set(__self__, "artifacts", artifacts)
if desired_state is not None:
pulumi.set(__self__, "desired_state", desired_state)
if install_steps is not None:
pulumi.set(__self__, "install_steps", install_steps)
if update_steps is not None:
pulumi.set(__self__, "update_steps", update_steps)
if version is not None:
pulumi.set(__self__, "version", version)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Unique identifier for the recipe. Only one recipe with a given name is installed on an instance.
        Names are also used to identify resources which helps to determine whether guest policies have conflicts.
        This means that requests to create multiple recipes with the same name and version are rejected since they
        could potentially have conflicting assignments.
        """
        # Required field: always set unconditionally in __init__.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def artifacts(self) -> Optional[Sequence['outputs.GuestPoliciesRecipeArtifact']]:
        """
        Resources available to be used in the steps in the recipe.
        Structure is documented below.
        """
        # Optional: only present when supplied at construction time.
        return pulumi.get(self, "artifacts")
    @property
    @pulumi.getter(name="desiredState")
    def desired_state(self) -> Optional[str]:
        """
        Default is INSTALLED. The desired state the agent should maintain for this recipe.
        INSTALLED: The software recipe is installed on the instance but won't be updated to new versions.
        INSTALLED_KEEP_UPDATED: The software recipe is installed on the instance. The recipe is updated to a higher version,
        if a higher version of the recipe is assigned to this instance.
        REMOVE: Remove is unsupported for software recipes and attempts to create or update a recipe to the REMOVE state is rejected.
        Default value is `INSTALLED`.
        Possible values are `INSTALLED`, `UPDATED`, and `REMOVED`.

        NOTE(review): the prose above names INSTALLED / INSTALLED_KEEP_UPDATED / REMOVE
        while the "Possible values" line lists INSTALLED / UPDATED / REMOVED — the
        upstream generated docs appear inconsistent; confirm against the OS Config API.
        """
        return pulumi.get(self, "desired_state")
    @property
    @pulumi.getter(name="installSteps")
    def install_steps(self) -> Optional[Sequence['outputs.GuestPoliciesRecipeInstallStep']]:
        """
        Actions to be taken for installing this recipe. On failure it stops executing steps and does not attempt another installation.
        Any steps taken (including partially completed steps) are not rolled back.
        Structure is documented below.
        """
        # Optional: only present when supplied at construction time.
        return pulumi.get(self, "install_steps")
    @property
    @pulumi.getter(name="updateSteps")
    def update_steps(self) -> Optional[Sequence['outputs.GuestPoliciesRecipeUpdateStep']]:
        """
        Actions to be taken for updating this recipe. On failure it stops executing steps and does not attempt another update for this recipe.
        Any steps taken (including partially completed steps) are not rolled back.
        Structure is documented below.
        """
        # Optional: only present when supplied at construction time.
        return pulumi.get(self, "update_steps")
    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        """
        The version of this software recipe. Version can be up to 4 period separated numbers (e.g. 12.34.56.78).
        """
        # Stored as a string (not parsed) — dotted-quad style versions exceed int range semantics.
        return pulumi.get(self, "version")
@pulumi.output_type
class GuestPoliciesRecipeArtifact(dict):
    """A resource (artifact) that a recipe's install/update steps can reference by id."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name so the
        # warning can point callers at the correct getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeArtifact. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeArtifact.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeArtifact.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 id: str,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.GuestPoliciesRecipeArtifactGcs'] = None,
                 remote: Optional['outputs.GuestPoliciesRecipeArtifactRemote'] = None):
        """
        :param str id: Id of the artifact, which the installation and update steps of this recipe can reference.
               Artifacts in a recipe cannot have the same id.
        :param bool allow_insecure: Defaults to false. When false, recipes are subject to validations based on the artifact type:
               Remote: A checksum must be specified, and only protocols with transport-layer security are permitted.
               GCS: An object generation number must be specified.
        :param 'GuestPoliciesRecipeArtifactGcsArgs' gcs: A Google Cloud Storage artifact.
               Structure is documented below.
        :param 'GuestPoliciesRecipeArtifactRemoteArgs' remote: A generic remote artifact.
               Structure is documented below.
        """
        pulumi.set(__self__, "id", id)
        # Optional fields are stored only when provided so absent keys stay absent.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Id of the artifact, which the installation and update steps of this recipe can reference.
        Artifacts in a recipe cannot have the same id.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, recipes are subject to validations based on the artifact type:
        Remote: A checksum must be specified, and only protocols with transport-layer security are permitted.
        GCS: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.GuestPoliciesRecipeArtifactGcs']:
        """
        A Google Cloud Storage artifact.
        Structure is documented below.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.GuestPoliciesRecipeArtifactRemote']:
        """
        A generic remote artifact.
        Structure is documented below.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class GuestPoliciesRecipeArtifactGcs(dict):
    """A Google Cloud Storage artifact reference (bucket / object name / generation)."""

    def __init__(__self__, *,
                 bucket: Optional[str] = None,
                 generation: Optional[int] = None,
                 object: Optional[str] = None):
        """
        :param str bucket: Bucket of the Google Cloud Storage object. Given an example URL: https://storage.googleapis.com/my-bucket/foo/bar#1234567
               this value would be my-bucket.
        :param int generation: Must be provided if allowInsecure is false. Generation number of the Google Cloud Storage object.
               https://storage.googleapis.com/my-bucket/foo/bar#1234567 this value would be 1234567.
        :param str object: Name of the Google Cloud Storage object. Given an example URL: https://storage.googleapis.com/my-bucket/foo/bar#1234567
               this value would be foo/bar.
        """
        # All fields are optional; only provided values are stored.
        if bucket is not None:
            pulumi.set(__self__, "bucket", bucket)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)
        if object is not None:
            # NOTE: the parameter intentionally shadows the builtin `object` to
            # match the wire-level field name of the generated schema.
            pulumi.set(__self__, "object", object)

    @property
    @pulumi.getter
    def bucket(self) -> Optional[str]:
        """
        Bucket of the Google Cloud Storage object. Given an example URL: https://storage.googleapis.com/my-bucket/foo/bar#1234567
        this value would be my-bucket.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Must be provided if allowInsecure is false. Generation number of the Google Cloud Storage object.
        https://storage.googleapis.com/my-bucket/foo/bar#1234567 this value would be 1234567.
        """
        return pulumi.get(self, "generation")

    @property
    @pulumi.getter
    def object(self) -> Optional[str]:
        """
        Name of the Google Cloud Storage object. Given an example URL: https://storage.googleapis.com/my-bucket/foo/bar#1234567
        this value would be foo/bar.
        """
        return pulumi.get(self, "object")
@pulumi.output_type
class GuestPoliciesRecipeArtifactRemote(dict):
    """A generic remote artifact fetched by URI, optionally verified by SHA256 checksum."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "checkSum":
            suggest = "check_sum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeArtifactRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeArtifactRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeArtifactRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 check_sum: Optional[str] = None,
                 uri: Optional[str] = None):
        """
        :param str check_sum: Must be provided if allowInsecure is false. SHA256 checksum in hex format, to compare to the checksum of the artifact.
               If the checksum is not empty and it doesn't match the artifact then the recipe installation fails before running any
               of the steps.
        :param str uri: URI from which to fetch the object. It should contain both the protocol and path following the format {protocol}://{location}.
        """
        # Both fields are optional; only provided values are stored.
        if check_sum is not None:
            pulumi.set(__self__, "check_sum", check_sum)
        if uri is not None:
            pulumi.set(__self__, "uri", uri)

    @property
    @pulumi.getter(name="checkSum")
    def check_sum(self) -> Optional[str]:
        """
        Must be provided if allowInsecure is false. SHA256 checksum in hex format, to compare to the checksum of the artifact.
        If the checksum is not empty and it doesn't match the artifact then the recipe installation fails before running any
        of the steps.
        """
        return pulumi.get(self, "check_sum")

    @property
    @pulumi.getter
    def uri(self) -> Optional[str]:
        """
        URI from which to fetch the object. It should contain both the protocol and path following the format {protocol}://{location}.
        """
        return pulumi.get(self, "uri")
@pulumi.output_type
class GuestPoliciesRecipeInstallStep(dict):
    """One action to take as part of installing this recipe.

    NOTE(review): all action fields are optional here; presumably the API
    expects exactly one action per step — confirm against the OS Config API.
    """

    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to their snake_case property names for the warning.
        suggest = None
        if key == "archiveExtraction":
            suggest = "archive_extraction"
        elif key == "dpkgInstallation":
            suggest = "dpkg_installation"
        elif key == "fileCopy":
            suggest = "file_copy"
        elif key == "fileExec":
            suggest = "file_exec"
        elif key == "msiInstallation":
            suggest = "msi_installation"
        elif key == "rpmInstallation":
            suggest = "rpm_installation"
        elif key == "scriptRun":
            suggest = "script_run"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStep. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStep.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStep.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 archive_extraction: Optional['outputs.GuestPoliciesRecipeInstallStepArchiveExtraction'] = None,
                 dpkg_installation: Optional['outputs.GuestPoliciesRecipeInstallStepDpkgInstallation'] = None,
                 file_copy: Optional['outputs.GuestPoliciesRecipeInstallStepFileCopy'] = None,
                 file_exec: Optional['outputs.GuestPoliciesRecipeInstallStepFileExec'] = None,
                 msi_installation: Optional['outputs.GuestPoliciesRecipeInstallStepMsiInstallation'] = None,
                 rpm_installation: Optional['outputs.GuestPoliciesRecipeInstallStepRpmInstallation'] = None,
                 script_run: Optional['outputs.GuestPoliciesRecipeInstallStepScriptRun'] = None):
        """
        :param 'GuestPoliciesRecipeInstallStepArchiveExtractionArgs' archive_extraction: Extracts an archive into the specified directory.
               Structure is documented below.
        :param 'GuestPoliciesRecipeInstallStepDpkgInstallationArgs' dpkg_installation: Installs a deb file via dpkg.
               Structure is documented below.
        :param 'GuestPoliciesRecipeInstallStepFileCopyArgs' file_copy: Copies a file onto the instance.
               Structure is documented below.
        :param 'GuestPoliciesRecipeInstallStepFileExecArgs' file_exec: Executes an artifact or local file.
               Structure is documented below.
        :param 'GuestPoliciesRecipeInstallStepMsiInstallationArgs' msi_installation: Installs an MSI file.
               Structure is documented below.
        :param 'GuestPoliciesRecipeInstallStepRpmInstallationArgs' rpm_installation: Installs an rpm file via the rpm utility.
               Structure is documented below.
        :param 'GuestPoliciesRecipeInstallStepScriptRunArgs' script_run: Runs commands in a shell.
               Structure is documented below.
        """
        # Only provided actions are stored so absent keys stay absent.
        if archive_extraction is not None:
            pulumi.set(__self__, "archive_extraction", archive_extraction)
        if dpkg_installation is not None:
            pulumi.set(__self__, "dpkg_installation", dpkg_installation)
        if file_copy is not None:
            pulumi.set(__self__, "file_copy", file_copy)
        if file_exec is not None:
            pulumi.set(__self__, "file_exec", file_exec)
        if msi_installation is not None:
            pulumi.set(__self__, "msi_installation", msi_installation)
        if rpm_installation is not None:
            pulumi.set(__self__, "rpm_installation", rpm_installation)
        if script_run is not None:
            pulumi.set(__self__, "script_run", script_run)

    @property
    @pulumi.getter(name="archiveExtraction")
    def archive_extraction(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepArchiveExtraction']:
        """
        Extracts an archive into the specified directory.
        Structure is documented below.
        """
        return pulumi.get(self, "archive_extraction")

    @property
    @pulumi.getter(name="dpkgInstallation")
    def dpkg_installation(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepDpkgInstallation']:
        """
        Installs a deb file via dpkg.
        Structure is documented below.
        """
        return pulumi.get(self, "dpkg_installation")

    @property
    @pulumi.getter(name="fileCopy")
    def file_copy(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepFileCopy']:
        """
        Copies a file onto the instance.
        Structure is documented below.
        """
        return pulumi.get(self, "file_copy")

    @property
    @pulumi.getter(name="fileExec")
    def file_exec(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepFileExec']:
        """
        Executes an artifact or local file.
        Structure is documented below.
        """
        return pulumi.get(self, "file_exec")

    @property
    @pulumi.getter(name="msiInstallation")
    def msi_installation(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepMsiInstallation']:
        """
        Installs an MSI file.
        Structure is documented below.
        """
        return pulumi.get(self, "msi_installation")

    @property
    @pulumi.getter(name="rpmInstallation")
    def rpm_installation(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepRpmInstallation']:
        """
        Installs an rpm file via the rpm utility.
        Structure is documented below.
        """
        return pulumi.get(self, "rpm_installation")

    @property
    @pulumi.getter(name="scriptRun")
    def script_run(self) -> Optional['outputs.GuestPoliciesRecipeInstallStepScriptRun']:
        """
        Runs commands in a shell.
        Structure is documented below.
        """
        return pulumi.get(self, "script_run")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepArchiveExtraction(dict):
    """Install step that extracts an archive artifact into a destination directory."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepArchiveExtraction. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepArchiveExtraction.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepArchiveExtraction.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str,
                 type: str,
                 destination: Optional[str] = None):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param str type: The type of the archive to extract.
               Possible values are `TAR`, `TAR_GZIP`, `TAR_BZIP`, `TAR_LZMA`, `TAR_XZ`, and `ZIP`.
        :param str destination: Directory to extract archive to. Defaults to / on Linux or C:\\ on Windows.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)
        pulumi.set(__self__, "type", type)
        if destination is not None:
            pulumi.set(__self__, "destination", destination)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the archive to extract.
        Possible values are `TAR`, `TAR_GZIP`, `TAR_BZIP`, `TAR_LZMA`, `TAR_XZ`, and `ZIP`.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def destination(self) -> Optional[str]:
        """
        Directory to extract archive to. Defaults to / on Linux or C:\\ on Windows.
        """
        return pulumi.get(self, "destination")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepDpkgInstallation(dict):
    """Install step that installs a deb artifact via dpkg."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepDpkgInstallation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepDpkgInstallation.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepDpkgInstallation.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepFileCopy(dict):
    """Install step that copies an artifact file onto the instance."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepFileCopy. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepFileCopy.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepFileCopy.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str,
                 destination: str,
                 overwrite: Optional[bool] = None,
                 permissions: Optional[str] = None):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param str destination: The absolute path on the instance to put the file.
               NOTE(review): the upstream generated doc said "Directory to extract archive to",
               which looks copy-pasted from archiveExtraction — confirmed intent against the
               OS Config SoftwareRecipe.Step.CopyFile API doc, but verify on regeneration.
        :param bool overwrite: Whether to allow this step to overwrite existing files.If this is false and the file already exists the file
               is not overwritten and the step is considered a success. Defaults to false.
        :param str permissions: Consists of three octal digits which represent, in order, the permissions of the owner, group, and other users
               for the file (similarly to the numeric mode used in the linux chmod utility). Each digit represents a three bit
               number with the 4 bit corresponding to the read permissions, the 2 bit corresponds to the write bit, and the one
               bit corresponds to the execute permission. Default behavior is 755.
               Below are some examples of permissions and their associated values:
               read, write, and execute: 7 read and execute: 5 read and write: 6 read only: 4
        """
        pulumi.set(__self__, "artifact_id", artifact_id)
        pulumi.set(__self__, "destination", destination)
        if overwrite is not None:
            pulumi.set(__self__, "overwrite", overwrite)
        if permissions is not None:
            pulumi.set(__self__, "permissions", permissions)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter
    def destination(self) -> str:
        """
        The absolute path on the instance to put the file.
        NOTE(review): upstream generated doc said "Directory to extract archive to";
        see the note on __init__.
        """
        return pulumi.get(self, "destination")

    @property
    @pulumi.getter
    def overwrite(self) -> Optional[bool]:
        """
        Whether to allow this step to overwrite existing files.If this is false and the file already exists the file
        is not overwritten and the step is considered a success. Defaults to false.
        """
        return pulumi.get(self, "overwrite")

    @property
    @pulumi.getter
    def permissions(self) -> Optional[str]:
        """
        Consists of three octal digits which represent, in order, the permissions of the owner, group, and other users
        for the file (similarly to the numeric mode used in the linux chmod utility). Each digit represents a three bit
        number with the 4 bit corresponding to the read permissions, the 2 bit corresponds to the write bit, and the one
        bit corresponds to the execute permission. Default behavior is 755.
        Below are some examples of permissions and their associated values:
        read, write, and execute: 7 read and execute: 5 read and write: 6 read only: 4
        """
        return pulumi.get(self, "permissions")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepFileExec(dict):
    """Install step that executes an artifact or a local file.

    NOTE(review): here `allowed_exit_codes` is a comma-style *string*, unlike the
    `Sequence[int]` used by the MSI/script-run steps — this mirrors the generated
    schema; confirm against the provider schema before relying on the type.
    """

    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to their snake_case property names for the warning.
        suggest = None
        if key == "allowedExitCodes":
            suggest = "allowed_exit_codes"
        elif key == "artifactId":
            suggest = "artifact_id"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepFileExec. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepFileExec.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepFileExec.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_exit_codes: Optional[str] = None,
                 args: Optional[Sequence[str]] = None,
                 artifact_id: Optional[str] = None,
                 local_path: Optional[str] = None):
        """
        :param str allowed_exit_codes: Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        :param Sequence[str] args: Arguments to be passed to the provided executable.
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param str local_path: The absolute path of the file on the local filesystem.
        """
        # All fields are optional; only provided values are stored.
        if allowed_exit_codes is not None:
            pulumi.set(__self__, "allowed_exit_codes", allowed_exit_codes)
        if args is not None:
            pulumi.set(__self__, "args", args)
        if artifact_id is not None:
            pulumi.set(__self__, "artifact_id", artifact_id)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)

    @property
    @pulumi.getter(name="allowedExitCodes")
    def allowed_exit_codes(self) -> Optional[str]:
        """
        Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        """
        return pulumi.get(self, "allowed_exit_codes")

    @property
    @pulumi.getter
    def args(self) -> Optional[Sequence[str]]:
        """
        Arguments to be passed to the provided executable.
        """
        return pulumi.get(self, "args")

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> Optional[str]:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        The absolute path of the file on the local filesystem.
        """
        return pulumi.get(self, "local_path")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepMsiInstallation(dict):
    """Install step that installs an MSI artifact."""

    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to their snake_case property names for the warning.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"
        elif key == "allowedExitCodes":
            suggest = "allowed_exit_codes"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepMsiInstallation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepMsiInstallation.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepMsiInstallation.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str,
                 allowed_exit_codes: Optional[Sequence[int]] = None,
                 flags: Optional[Sequence[str]] = None):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param Sequence[int] allowed_exit_codes: Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        :param Sequence[str] flags: The flags to use when installing the MSI. Defaults to the install flag.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)
        if allowed_exit_codes is not None:
            pulumi.set(__self__, "allowed_exit_codes", allowed_exit_codes)
        if flags is not None:
            pulumi.set(__self__, "flags", flags)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter(name="allowedExitCodes")
    def allowed_exit_codes(self) -> Optional[Sequence[int]]:
        """
        Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        """
        return pulumi.get(self, "allowed_exit_codes")

    @property
    @pulumi.getter
    def flags(self) -> Optional[Sequence[str]]:
        """
        The flags to use when installing the MSI. Defaults to the install flag.
        """
        return pulumi.get(self, "flags")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepRpmInstallation(dict):
    """Install step that installs an rpm artifact via the rpm utility."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepRpmInstallation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepRpmInstallation.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepRpmInstallation.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")
@pulumi.output_type
class GuestPoliciesRecipeInstallStepScriptRun(dict):
    """Install step that runs a script via a shell or a named interpreter."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "allowedExitCodes":
            suggest = "allowed_exit_codes"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeInstallStepScriptRun. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeInstallStepScriptRun.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeInstallStepScriptRun.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 script: str,
                 allowed_exit_codes: Optional[Sequence[int]] = None,
                 interpreter: Optional[str] = None):
        """
        :param str script: The shell script to be executed.
        :param Sequence[int] allowed_exit_codes: Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        :param str interpreter: The script interpreter to use to run the script. If no interpreter is specified the script is executed directly,
               which likely only succeed for scripts with shebang lines.
               Possible values are `SHELL` and `POWERSHELL`.
        """
        pulumi.set(__self__, "script", script)
        if allowed_exit_codes is not None:
            pulumi.set(__self__, "allowed_exit_codes", allowed_exit_codes)
        if interpreter is not None:
            pulumi.set(__self__, "interpreter", interpreter)

    @property
    @pulumi.getter
    def script(self) -> str:
        """
        The shell script to be executed.
        """
        return pulumi.get(self, "script")

    @property
    @pulumi.getter(name="allowedExitCodes")
    def allowed_exit_codes(self) -> Optional[Sequence[int]]:
        """
        Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        """
        return pulumi.get(self, "allowed_exit_codes")

    @property
    @pulumi.getter
    def interpreter(self) -> Optional[str]:
        """
        The script interpreter to use to run the script. If no interpreter is specified the script is executed directly,
        which likely only succeed for scripts with shebang lines.
        Possible values are `SHELL` and `POWERSHELL`.
        """
        return pulumi.get(self, "interpreter")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStep(dict):
    """One action to take as part of updating this recipe.

    NOTE(review): all action fields are optional here; presumably the API
    expects exactly one action per step — confirm against the OS Config API.
    """

    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to their snake_case property names for the warning.
        suggest = None
        if key == "archiveExtraction":
            suggest = "archive_extraction"
        elif key == "dpkgInstallation":
            suggest = "dpkg_installation"
        elif key == "fileCopy":
            suggest = "file_copy"
        elif key == "fileExec":
            suggest = "file_exec"
        elif key == "msiInstallation":
            suggest = "msi_installation"
        elif key == "rpmInstallation":
            suggest = "rpm_installation"
        elif key == "scriptRun":
            suggest = "script_run"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStep. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeUpdateStep.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeUpdateStep.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 archive_extraction: Optional['outputs.GuestPoliciesRecipeUpdateStepArchiveExtraction'] = None,
                 dpkg_installation: Optional['outputs.GuestPoliciesRecipeUpdateStepDpkgInstallation'] = None,
                 file_copy: Optional['outputs.GuestPoliciesRecipeUpdateStepFileCopy'] = None,
                 file_exec: Optional['outputs.GuestPoliciesRecipeUpdateStepFileExec'] = None,
                 msi_installation: Optional['outputs.GuestPoliciesRecipeUpdateStepMsiInstallation'] = None,
                 rpm_installation: Optional['outputs.GuestPoliciesRecipeUpdateStepRpmInstallation'] = None,
                 script_run: Optional['outputs.GuestPoliciesRecipeUpdateStepScriptRun'] = None):
        """
        :param 'GuestPoliciesRecipeUpdateStepArchiveExtractionArgs' archive_extraction: Extracts an archive into the specified directory.
               Structure is documented below.
        :param 'GuestPoliciesRecipeUpdateStepDpkgInstallationArgs' dpkg_installation: Installs a deb file via dpkg.
               Structure is documented below.
        :param 'GuestPoliciesRecipeUpdateStepFileCopyArgs' file_copy: Copies a file onto the instance.
               Structure is documented below.
        :param 'GuestPoliciesRecipeUpdateStepFileExecArgs' file_exec: Executes an artifact or local file.
               Structure is documented below.
        :param 'GuestPoliciesRecipeUpdateStepMsiInstallationArgs' msi_installation: Installs an MSI file.
               Structure is documented below.
        :param 'GuestPoliciesRecipeUpdateStepRpmInstallationArgs' rpm_installation: Installs an rpm file via the rpm utility.
               Structure is documented below.
        :param 'GuestPoliciesRecipeUpdateStepScriptRunArgs' script_run: Runs commands in a shell.
               Structure is documented below.
        """
        # Only provided actions are stored so absent keys stay absent.
        if archive_extraction is not None:
            pulumi.set(__self__, "archive_extraction", archive_extraction)
        if dpkg_installation is not None:
            pulumi.set(__self__, "dpkg_installation", dpkg_installation)
        if file_copy is not None:
            pulumi.set(__self__, "file_copy", file_copy)
        if file_exec is not None:
            pulumi.set(__self__, "file_exec", file_exec)
        if msi_installation is not None:
            pulumi.set(__self__, "msi_installation", msi_installation)
        if rpm_installation is not None:
            pulumi.set(__self__, "rpm_installation", rpm_installation)
        if script_run is not None:
            pulumi.set(__self__, "script_run", script_run)

    @property
    @pulumi.getter(name="archiveExtraction")
    def archive_extraction(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepArchiveExtraction']:
        """
        Extracts an archive into the specified directory.
        Structure is documented below.
        """
        return pulumi.get(self, "archive_extraction")

    @property
    @pulumi.getter(name="dpkgInstallation")
    def dpkg_installation(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepDpkgInstallation']:
        """
        Installs a deb file via dpkg.
        Structure is documented below.
        """
        return pulumi.get(self, "dpkg_installation")

    @property
    @pulumi.getter(name="fileCopy")
    def file_copy(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepFileCopy']:
        """
        Copies a file onto the instance.
        Structure is documented below.
        """
        return pulumi.get(self, "file_copy")

    @property
    @pulumi.getter(name="fileExec")
    def file_exec(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepFileExec']:
        """
        Executes an artifact or local file.
        Structure is documented below.
        """
        return pulumi.get(self, "file_exec")

    @property
    @pulumi.getter(name="msiInstallation")
    def msi_installation(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepMsiInstallation']:
        """
        Installs an MSI file.
        Structure is documented below.
        """
        return pulumi.get(self, "msi_installation")

    @property
    @pulumi.getter(name="rpmInstallation")
    def rpm_installation(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepRpmInstallation']:
        """
        Installs an rpm file via the rpm utility.
        Structure is documented below.
        """
        return pulumi.get(self, "rpm_installation")

    @property
    @pulumi.getter(name="scriptRun")
    def script_run(self) -> Optional['outputs.GuestPoliciesRecipeUpdateStepScriptRun']:
        """
        Runs commands in a shell.
        Structure is documented below.
        """
        return pulumi.get(self, "script_run")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepArchiveExtraction(dict):
    """Update step that extracts an archive artifact into a destination directory."""

    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name for the warning.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepArchiveExtraction. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # dict-style access; warns when a camelCase key is used directly.
        GuestPoliciesRecipeUpdateStepArchiveExtraction.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        # dict.get with the same camelCase-key warning as __getitem__.
        GuestPoliciesRecipeUpdateStepArchiveExtraction.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str,
                 type: str,
                 destination: Optional[str] = None):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param str type: The type of the archive to extract.
               Possible values are `TAR`, `TAR_GZIP`, `TAR_BZIP`, `TAR_LZMA`, `TAR_XZ`, and `ZIP`.
        :param str destination: Directory to extract archive to. Defaults to / on Linux or C:\\ on Windows.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)
        pulumi.set(__self__, "type", type)
        if destination is not None:
            pulumi.set(__self__, "destination", destination)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the archive to extract.
        Possible values are `TAR`, `TAR_GZIP`, `TAR_BZIP`, `TAR_LZMA`, `TAR_XZ`, and `ZIP`.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def destination(self) -> Optional[str]:
        """
        Directory to extract archive to. Defaults to / on Linux or C:\\ on Windows.
        """
        return pulumi.get(self, "destination")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepDpkgInstallation(dict):
    """Recipe update step that installs a deb artifact via dpkg."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire key -> snake_case property name
        suggest = {"artifactId": "artifact_id"}.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepDpkgInstallation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 artifact_id: str):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepFileCopy(dict):
    """Recipe update step that copies an artifact file onto the instance."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "artifactId":
            suggest = "artifact_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepFileCopy. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        GuestPoliciesRecipeUpdateStepFileCopy.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        GuestPoliciesRecipeUpdateStepFileCopy.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 artifact_id: str,
                 destination: str,
                 overwrite: Optional[bool] = None,
                 permissions: Optional[str] = None):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param str destination: Directory to extract archive to. Defaults to / on Linux or C:\\ on Windows.
        :param bool overwrite: Whether to allow this step to overwrite existing files.If this is false and the file already exists the file
               is not overwritten and the step is considered a success. Defaults to false.
        :param str permissions: Consists of three octal digits which represent, in order, the permissions of the owner, group, and other users
               for the file (similarly to the numeric mode used in the linux chmod utility). Each digit represents a three bit
               number with the 4 bit corresponding to the read permissions, the 2 bit corresponds to the write bit, and the one
               bit corresponds to the execute permission. Default behavior is 755.
               Below are some examples of permissions and their associated values:
               read, write, and execute: 7 read and execute: 5 read and write: 6 read only: 4
        """
        pulumi.set(__self__, "artifact_id", artifact_id)
        pulumi.set(__self__, "destination", destination)
        if overwrite is not None:
            pulumi.set(__self__, "overwrite", overwrite)
        if permissions is not None:
            pulumi.set(__self__, "permissions", permissions)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter
    def destination(self) -> str:
        """
        Directory to extract archive to. Defaults to / on Linux or C:\\ on Windows.
        """
        return pulumi.get(self, "destination")

    @property
    @pulumi.getter
    def overwrite(self) -> Optional[bool]:
        """
        Whether to allow this step to overwrite existing files.If this is false and the file already exists the file
        is not overwritten and the step is considered a success. Defaults to false.
        """
        return pulumi.get(self, "overwrite")

    @property
    @pulumi.getter
    def permissions(self) -> Optional[str]:
        """
        Consists of three octal digits which represent, in order, the permissions of the owner, group, and other users
        for the file (similarly to the numeric mode used in the linux chmod utility). Each digit represents a three bit
        number with the 4 bit corresponding to the read permissions, the 2 bit corresponds to the write bit, and the one
        bit corresponds to the execute permission. Default behavior is 755.
        Below are some examples of permissions and their associated values:
        read, write, and execute: 7 read and execute: 5 read and write: 6 read only: 4
        """
        return pulumi.get(self, "permissions")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepFileExec(dict):
    """Recipe update step that executes an artifact or a local file."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggest = {
            "allowedExitCodes": "allowed_exit_codes",
            "artifactId": "artifact_id",
            "localPath": "local_path",
        }.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepFileExec. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 allowed_exit_codes: Optional[Sequence[int]] = None,
                 args: Optional[Sequence[str]] = None,
                 artifact_id: Optional[str] = None,
                 local_path: Optional[str] = None):
        """
        :param Sequence[int] allowed_exit_codes: Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        :param Sequence[str] args: Arguments to be passed to the provided executable.
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param str local_path: The absolute path of the file on the local filesystem.
        """
        if allowed_exit_codes is not None:
            pulumi.set(__self__, "allowed_exit_codes", allowed_exit_codes)
        if args is not None:
            pulumi.set(__self__, "args", args)
        if artifact_id is not None:
            pulumi.set(__self__, "artifact_id", artifact_id)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)

    @property
    @pulumi.getter(name="allowedExitCodes")
    def allowed_exit_codes(self) -> Optional[Sequence[int]]:
        """
        Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        """
        return pulumi.get(self, "allowed_exit_codes")

    @property
    @pulumi.getter
    def args(self) -> Optional[Sequence[str]]:
        """
        Arguments to be passed to the provided executable.
        """
        return pulumi.get(self, "args")

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> Optional[str]:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        The absolute path of the file on the local filesystem.
        """
        return pulumi.get(self, "local_path")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepMsiInstallation(dict):
    """Recipe update step that installs an MSI artifact."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggest = {
            "artifactId": "artifact_id",
            "allowedExitCodes": "allowed_exit_codes",
        }.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepMsiInstallation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 artifact_id: str,
                 allowed_exit_codes: Optional[Sequence[int]] = None,
                 flags: Optional[Sequence[str]] = None):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        :param Sequence[int] allowed_exit_codes: Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        :param Sequence[str] flags: The flags to use when installing the MSI. Defaults to the install flag.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)
        if allowed_exit_codes is not None:
            pulumi.set(__self__, "allowed_exit_codes", allowed_exit_codes)
        if flags is not None:
            pulumi.set(__self__, "flags", flags)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")

    @property
    @pulumi.getter(name="allowedExitCodes")
    def allowed_exit_codes(self) -> Optional[Sequence[int]]:
        """
        Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        """
        return pulumi.get(self, "allowed_exit_codes")

    @property
    @pulumi.getter
    def flags(self) -> Optional[Sequence[str]]:
        """
        The flags to use when installing the MSI. Defaults to the install flag.
        """
        return pulumi.get(self, "flags")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepRpmInstallation(dict):
    """Recipe update step that installs an rpm artifact via the rpm utility."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire key -> snake_case property name
        suggest = {"artifactId": "artifact_id"}.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepRpmInstallation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 artifact_id: str):
        """
        :param str artifact_id: The id of the relevant artifact in the recipe.
        """
        pulumi.set(__self__, "artifact_id", artifact_id)

    @property
    @pulumi.getter(name="artifactId")
    def artifact_id(self) -> str:
        """
        The id of the relevant artifact in the recipe.
        """
        return pulumi.get(self, "artifact_id")
@pulumi.output_type
class GuestPoliciesRecipeUpdateStepScriptRun(dict):
    """Recipe update step that runs a script in a shell."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire key -> snake_case property name
        suggest = {"allowedExitCodes": "allowed_exit_codes"}.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in GuestPoliciesRecipeUpdateStepScriptRun. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 script: str,
                 allowed_exit_codes: Optional[Sequence[int]] = None,
                 interpreter: Optional[str] = None):
        """
        :param str script: The shell script to be executed.
        :param Sequence[int] allowed_exit_codes: Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        :param str interpreter: The script interpreter to use to run the script. If no interpreter is specified the script is executed directly,
               which likely only succeed for scripts with shebang lines.
               Possible values are `SHELL` and `POWERSHELL`.
        """
        pulumi.set(__self__, "script", script)
        if allowed_exit_codes is not None:
            pulumi.set(__self__, "allowed_exit_codes", allowed_exit_codes)
        if interpreter is not None:
            pulumi.set(__self__, "interpreter", interpreter)

    @property
    @pulumi.getter
    def script(self) -> str:
        """
        The shell script to be executed.
        """
        return pulumi.get(self, "script")

    @property
    @pulumi.getter(name="allowedExitCodes")
    def allowed_exit_codes(self) -> Optional[Sequence[int]]:
        """
        Return codes that indicate that the software installed or updated successfully. Behaviour defaults to [0]
        """
        return pulumi.get(self, "allowed_exit_codes")

    @property
    @pulumi.getter
    def interpreter(self) -> Optional[str]:
        """
        The script interpreter to use to run the script. If no interpreter is specified the script is executed directly,
        which likely only succeed for scripts with shebang lines.
        Possible values are `SHELL` and `POWERSHELL`.
        """
        return pulumi.get(self, "interpreter")
@pulumi.output_type
class OsPolicyAssignmentInstanceFilter(dict):
    """Filter that selects which VMs an OS policy assignment applies to."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggest = {
            "exclusionLabels": "exclusion_labels",
            "inclusionLabels": "inclusion_labels",
        }.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentInstanceFilter. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 all: Optional[bool] = None,
                 exclusion_labels: Optional[Sequence['outputs.OsPolicyAssignmentInstanceFilterExclusionLabel']] = None,
                 inclusion_labels: Optional[Sequence['outputs.OsPolicyAssignmentInstanceFilterInclusionLabel']] = None,
                 inventories: Optional[Sequence['outputs.OsPolicyAssignmentInstanceFilterInventory']] = None):
        """
        :param bool all: Target all VMs in the project. If true, no other criteria is permitted.
        :param Sequence['OsPolicyAssignmentInstanceFilterExclusionLabelArgs'] exclusion_labels: List of label sets used for VM exclusion. If the list has more than one label set, the VM is excluded if any of the label sets are applicable for the VM.
        :param Sequence['OsPolicyAssignmentInstanceFilterInclusionLabelArgs'] inclusion_labels: List of label sets used for VM inclusion. If the list has more than one `LabelSet`, the VM is included if any of the label sets are applicable for the VM.
        :param Sequence['OsPolicyAssignmentInstanceFilterInventoryArgs'] inventories: List of inventories to select VMs. A VM is selected if its inventory data matches at least one of the following inventories.
        """
        if all is not None:
            pulumi.set(__self__, "all", all)
        if exclusion_labels is not None:
            pulumi.set(__self__, "exclusion_labels", exclusion_labels)
        if inclusion_labels is not None:
            pulumi.set(__self__, "inclusion_labels", inclusion_labels)
        if inventories is not None:
            pulumi.set(__self__, "inventories", inventories)

    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        """
        Target all VMs in the project. If true, no other criteria is permitted.
        """
        return pulumi.get(self, "all")

    @property
    @pulumi.getter(name="exclusionLabels")
    def exclusion_labels(self) -> Optional[Sequence['outputs.OsPolicyAssignmentInstanceFilterExclusionLabel']]:
        """
        List of label sets used for VM exclusion. If the list has more than one label set, the VM is excluded if any of the label sets are applicable for the VM.
        """
        return pulumi.get(self, "exclusion_labels")

    @property
    @pulumi.getter(name="inclusionLabels")
    def inclusion_labels(self) -> Optional[Sequence['outputs.OsPolicyAssignmentInstanceFilterInclusionLabel']]:
        """
        List of label sets used for VM inclusion. If the list has more than one `LabelSet`, the VM is included if any of the label sets are applicable for the VM.
        """
        return pulumi.get(self, "inclusion_labels")

    @property
    @pulumi.getter
    def inventories(self) -> Optional[Sequence['outputs.OsPolicyAssignmentInstanceFilterInventory']]:
        """
        List of inventories to select VMs. A VM is selected if its inventory data matches at least one of the following inventories.
        """
        return pulumi.get(self, "inventories")
@pulumi.output_type
class OsPolicyAssignmentInstanceFilterExclusionLabel(dict):
    """A label set used to exclude VMs from an OS policy assignment."""

    def __init__(__self__, *,
                 labels: Optional[Mapping[str, str]] = None):
        """
        :param Mapping[str, str] labels: Labels are identified by key/value pairs in this map. A VM should contain all the key/value pairs specified in this map to be selected.
        """
        if labels is not None:
            pulumi.set(__self__, "labels", labels)

    @property
    @pulumi.getter
    def labels(self) -> Optional[Mapping[str, str]]:
        """
        Labels are identified by key/value pairs in this map. A VM should contain all the key/value pairs specified in this map to be selected.
        """
        label_map = pulumi.get(self, "labels")
        return label_map
@pulumi.output_type
class OsPolicyAssignmentInstanceFilterInclusionLabel(dict):
    """A label set used to include VMs in an OS policy assignment."""

    def __init__(__self__, *,
                 labels: Optional[Mapping[str, str]] = None):
        """
        :param Mapping[str, str] labels: Labels are identified by key/value pairs in this map. A VM should contain all the key/value pairs specified in this map to be selected.
        """
        if labels is not None:
            pulumi.set(__self__, "labels", labels)

    @property
    @pulumi.getter
    def labels(self) -> Optional[Mapping[str, str]]:
        """
        Labels are identified by key/value pairs in this map. A VM should contain all the key/value pairs specified in this map to be selected.
        """
        label_map = pulumi.get(self, "labels")
        return label_map
@pulumi.output_type
class OsPolicyAssignmentInstanceFilterInventory(dict):
    """An inventory (OS short name and optional version) used to select VMs."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggest = {
            "osShortName": "os_short_name",
            "osVersion": "os_version",
        }.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentInstanceFilterInventory. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 os_short_name: str,
                 os_version: Optional[str] = None):
        """
        :param str os_short_name: Required. The OS short name
        :param str os_version: The OS version Prefix matches are supported if asterisk(*) is provided as the last character. For example, to match all versions with a major version of `7`, specify the following value for this field `7.*` An empty string matches all OS versions.
        """
        pulumi.set(__self__, "os_short_name", os_short_name)
        if os_version is not None:
            pulumi.set(__self__, "os_version", os_version)

    @property
    @pulumi.getter(name="osShortName")
    def os_short_name(self) -> str:
        """
        Required. The OS short name
        """
        return pulumi.get(self, "os_short_name")

    @property
    @pulumi.getter(name="osVersion")
    def os_version(self) -> Optional[str]:
        """
        The OS version Prefix matches are supported if asterisk(*) is provided as the last character. For example, to match all versions with a major version of `7`, specify the following value for this field `7.*` An empty string matches all OS versions.
        """
        return pulumi.get(self, "os_version")
@pulumi.output_type
class OsPolicyAssignmentOsPolicy(dict):
    """A single OS policy within an OS policy assignment."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggest = {
            "resourceGroups": "resource_groups",
            "allowNoResourceGroupMatch": "allow_no_resource_group_match",
        }.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicy. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 id: str,
                 mode: str,
                 resource_groups: Sequence['outputs.OsPolicyAssignmentOsPolicyResourceGroup'],
                 allow_no_resource_group_match: Optional[bool] = None,
                 description: Optional[str] = None):
        """
        :param str id: Required. A one word, unique name for this repository. This is the `repo id` in the zypper config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        :param str mode: Required. Policy mode Possible values: MODE_UNSPECIFIED, VALIDATION, ENFORCEMENT
        :param Sequence['OsPolicyAssignmentOsPolicyResourceGroupArgs'] resource_groups: Required. List of resource groups for the policy. For a particular VM, resource groups are evaluated in the order specified and the first resource group that is applicable is selected and the rest are ignored. If none of the resource groups are applicable for a VM, the VM is considered to be non-compliant w.r.t this policy. This behavior can be toggled by the flag `allow_no_resource_group_match`
        :param bool allow_no_resource_group_match: This flag determines the OS policy compliance status when none of the resource groups within the policy are applicable for a VM. Set this value to `true` if the policy needs to be reported as compliant even if the policy has nothing to validate or enforce.
        :param str description: OS policy assignment description. Length of the description is limited to 1024 characters.
        """
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "mode", mode)
        pulumi.set(__self__, "resource_groups", resource_groups)
        if allow_no_resource_group_match is not None:
            pulumi.set(__self__, "allow_no_resource_group_match", allow_no_resource_group_match)
        if description is not None:
            pulumi.set(__self__, "description", description)

    # NOTE(review): the `id` description below appears copy-pasted from a repository
    # resource by the upstream generator; the field is the OS policy id — confirm
    # against the upstream provider docs before editing the generated text.
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Required. A one word, unique name for this repository. This is the `repo id` in the zypper config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def mode(self) -> str:
        """
        Required. Policy mode Possible values: MODE_UNSPECIFIED, VALIDATION, ENFORCEMENT
        """
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter(name="resourceGroups")
    def resource_groups(self) -> Sequence['outputs.OsPolicyAssignmentOsPolicyResourceGroup']:
        """
        Required. List of resource groups for the policy. For a particular VM, resource groups are evaluated in the order specified and the first resource group that is applicable is selected and the rest are ignored. If none of the resource groups are applicable for a VM, the VM is considered to be non-compliant w.r.t this policy. This behavior can be toggled by the flag `allow_no_resource_group_match`
        """
        return pulumi.get(self, "resource_groups")

    @property
    @pulumi.getter(name="allowNoResourceGroupMatch")
    def allow_no_resource_group_match(self) -> Optional[bool]:
        """
        This flag determines the OS policy compliance status when none of the resource groups within the policy are applicable for a VM. Set this value to `true` if the policy needs to be reported as compliant even if the policy has nothing to validate or enforce.
        """
        return pulumi.get(self, "allow_no_resource_group_match")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        OS policy assignment description. Length of the description is limited to 1024 characters.
        """
        return pulumi.get(self, "description")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroup(dict):
    """A group of resources within an OS policy, optionally gated by inventory filters."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire key -> snake_case property name
        suggest = {"inventoryFilters": "inventory_filters"}.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroup. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 resources: Sequence['outputs.OsPolicyAssignmentOsPolicyResourceGroupResource'],
                 inventory_filters: Optional[Sequence['outputs.OsPolicyAssignmentOsPolicyResourceGroupInventoryFilter']] = None):
        """
        :param Sequence['OsPolicyAssignmentOsPolicyResourceGroupResourceArgs'] resources: Required. List of resources configured for this resource group. The resources are executed in the exact order specified here.
        :param Sequence['OsPolicyAssignmentOsPolicyResourceGroupInventoryFilterArgs'] inventory_filters: List of inventory filters for the resource group. The resources in this resource group are applied to the target VM if it satisfies at least one of the following inventory filters. For example, to apply this resource group to VMs running either `RHEL` or `CentOS` operating systems, specify 2 items for the list with following values: inventory_filters[0].os_short_name='rhel' and inventory_filters[1].os_short_name='centos' If the list is empty, this resource group will be applied to the target VM unconditionally.
        """
        pulumi.set(__self__, "resources", resources)
        if inventory_filters is not None:
            pulumi.set(__self__, "inventory_filters", inventory_filters)

    @property
    @pulumi.getter
    def resources(self) -> Sequence['outputs.OsPolicyAssignmentOsPolicyResourceGroupResource']:
        """
        Required. List of resources configured for this resource group. The resources are executed in the exact order specified here.
        """
        return pulumi.get(self, "resources")

    @property
    @pulumi.getter(name="inventoryFilters")
    def inventory_filters(self) -> Optional[Sequence['outputs.OsPolicyAssignmentOsPolicyResourceGroupInventoryFilter']]:
        """
        List of inventory filters for the resource group. The resources in this resource group are applied to the target VM if it satisfies at least one of the following inventory filters. For example, to apply this resource group to VMs running either `RHEL` or `CentOS` operating systems, specify 2 items for the list with following values: inventory_filters[0].os_short_name='rhel' and inventory_filters[1].os_short_name='centos' If the list is empty, this resource group will be applied to the target VM unconditionally.
        """
        return pulumi.get(self, "inventory_filters")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupInventoryFilter(dict):
    """An inventory filter (OS short name and optional version) gating a resource group."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggest = {
            "osShortName": "os_short_name",
            "osVersion": "os_version",
        }.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupInventoryFilter. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 os_short_name: str,
                 os_version: Optional[str] = None):
        """
        :param str os_short_name: Required. The OS short name
        :param str os_version: The OS version Prefix matches are supported if asterisk(*) is provided as the last character. For example, to match all versions with a major version of `7`, specify the following value for this field `7.*` An empty string matches all OS versions.
        """
        pulumi.set(__self__, "os_short_name", os_short_name)
        if os_version is not None:
            pulumi.set(__self__, "os_version", os_version)

    @property
    @pulumi.getter(name="osShortName")
    def os_short_name(self) -> str:
        """
        Required. The OS short name
        """
        return pulumi.get(self, "os_short_name")

    @property
    @pulumi.getter(name="osVersion")
    def os_version(self) -> Optional[str]:
        """
        The OS version Prefix matches are supported if asterisk(*) is provided as the last character. For example, to match all versions with a major version of `7`, specify the following value for this field `7.*` An empty string matches all OS versions.
        """
        return pulumi.get(self, "os_version")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResource(dict):
    """A single resource (exec, file, pkg, or repository) within a resource group."""

    @staticmethod
    def __key_warning(key: str):
        # "exec" is a Python keyword-adjacent name, so the property is "exec_".
        suggest = {"exec": "exec_"}.get(key)
        if suggest is not None:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResource. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return dict.__getitem__(self, key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return dict.get(self, key, default)

    def __init__(__self__, *,
                 id: str,
                 exec_: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExec'] = None,
                 file: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFile'] = None,
                 pkg: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkg'] = None,
                 repository: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepository'] = None):
        """
        :param str id: Required. A one word, unique name for this repository. This is the `repo id` in the zypper config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecArgs' exec_: Exec resource
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceFileArgs' file: A remote or local source.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgArgs' pkg: Package resource
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryArgs' repository: Package repository resource
        """
        pulumi.set(__self__, "id", id)
        if exec_ is not None:
            pulumi.set(__self__, "exec_", exec_)
        if file is not None:
            pulumi.set(__self__, "file", file)
        if pkg is not None:
            pulumi.set(__self__, "pkg", pkg)
        if repository is not None:
            pulumi.set(__self__, "repository", repository)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Required. A one word, unique name for this repository. This is the `repo id` in the zypper config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="exec")
    def exec_(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExec']:
        """
        Exec resource
        """
        return pulumi.get(self, "exec_")

    @property
    @pulumi.getter
    def file(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFile']:
        """
        A remote or local source.
        """
        return pulumi.get(self, "file")

    @property
    @pulumi.getter
    def pkg(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkg']:
        """
        Package resource
        """
        return pulumi.get(self, "pkg")

    @property
    @pulumi.getter
    def repository(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepository']:
        """
        Package repository resource
        """
        return pulumi.get(self, "repository")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExec(dict):
    """Exec resource: a validate step plus an optional enforce step."""

    def __init__(__self__, *,
                 validate: 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate',
                 enforce: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce'] = None):
        """
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateArgs' validate: Required. What to run to validate this resource is in the desired state. An exit code of 100 indicates "in desired state", and exit code of 101 indicates "not in desired state". Any other exit code indicates a failure running validate.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceArgs' enforce: What to run to bring this resource into the desired state. An exit code of 100 indicates "success", any other exit code indicates a failure running enforce.
        """
        pulumi.set(__self__, "validate", validate)
        if enforce is not None:
            pulumi.set(__self__, "enforce", enforce)

    @property
    @pulumi.getter
    def validate(self) -> 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate':
        """
        Required. What to run to validate this resource is in the desired state. An exit code of 100 indicates "in desired state", and exit code of 101 indicates "not in desired state". Any other exit code indicates a failure running validate.
        """
        step = pulumi.get(self, "validate")
        return step

    @property
    @pulumi.getter
    def enforce(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce']:
        """
        What to run to bring this resource into the desired state. An exit code of 100 indicates "success", any other exit code indicates a failure running enforce.
        """
        step = pulumi.get(self, "enforce")
        return step
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce(dict):
    """Output type for the enforce step of an exec resource (interpreter, args, source file/script, output file path)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "outputFilePath":
            suggest = "output_file_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforce.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 interpreter: str,
                 args: Optional[Sequence[str]] = None,
                 file: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile'] = None,
                 output_file_path: Optional[str] = None,
                 script: Optional[str] = None):
        """
        :param str interpreter: Required. The script interpreter to use. Possible values: INTERPRETER_UNSPECIFIED, NONE, SHELL, POWERSHELL
        :param Sequence[str] args: Optional arguments to pass to the source during execution.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileArgs' file: A remote or local source.
        :param str output_file_path: Only recorded for enforce Exec. Path to an output file (that is created by this Exec) whose content will be recorded in OSPolicyResourceCompliance after a successful run. Absence or failure to read this file will result in this ExecResource being non-compliant. Output file size is limited to 100K bytes.
        :param str script: An inline script. The size of the script is limited to 1024 characters.
        """
        pulumi.set(__self__, "interpreter", interpreter)
        # Optional fields are only stored when provided, so absent keys stay absent.
        if args is not None:
            pulumi.set(__self__, "args", args)
        if file is not None:
            pulumi.set(__self__, "file", file)
        if output_file_path is not None:
            pulumi.set(__self__, "output_file_path", output_file_path)
        if script is not None:
            pulumi.set(__self__, "script", script)

    @property
    @pulumi.getter
    def interpreter(self) -> str:
        """
        Required. The script interpreter to use. Possible values: INTERPRETER_UNSPECIFIED, NONE, SHELL, POWERSHELL
        """
        return pulumi.get(self, "interpreter")

    @property
    @pulumi.getter
    def args(self) -> Optional[Sequence[str]]:
        """
        Optional arguments to pass to the source during execution.
        """
        return pulumi.get(self, "args")

    @property
    @pulumi.getter
    def file(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile']:
        """
        A remote or local source.
        """
        return pulumi.get(self, "file")

    @property
    @pulumi.getter(name="outputFilePath")
    def output_file_path(self) -> Optional[str]:
        """
        Only recorded for enforce Exec. Path to an output file (that is created by this Exec) whose content will be recorded in OSPolicyResourceCompliance after a successful run. Absence or failure to read this file will result in this ExecResource being non-compliant. Output file size is limited to 100K bytes.
        """
        return pulumi.get(self, "output_file_path")

    @property
    @pulumi.getter
    def script(self) -> Optional[str]:
        """
        An inline script. The size of the script is limited to 1024 characters.
        """
        return pulumi.get(self, "script")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile(dict):
    """Output type for the enforce step's file source: one of gcs / local_path / remote, plus allow_insecure."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFile.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileGcs'] = None,
                 local_path: Optional[str] = None,
                 remote: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote'] = None):
        """
        :param bool allow_insecure: Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileGcsArgs' gcs: A Cloud Storage object.
        :param str local_path: A local path within the VM to use.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemoteArgs' remote: A generic remote file.
        """
        # All fields are optional; only provided ones are stored.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileGcs']:
        """
        A Cloud Storage object.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        A local path within the VM to use.
        """
        return pulumi.get(self, "local_path")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote']:
        """
        A generic remote file.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileGcs(dict):
    """Output type identifying a Cloud Storage object by bucket, object name, and optional generation."""

    def __init__(__self__, *,
                 bucket: str,
                 object: str,
                 generation: Optional[int] = None):
        # NOTE: `object` shadows the builtin; the parameter name is fixed by the provider schema.
        """
        :param str bucket: Required. Bucket of the Cloud Storage object.
        :param str object: Required. Name of the Cloud Storage object.
        :param int generation: Generation number of the Cloud Storage object.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "object", object)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Required. Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Required. Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Generation number of the Cloud Storage object.
        """
        return pulumi.get(self, "generation")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote(dict):
    """Output type for a generic remote file: a URI and an optional SHA256 checksum."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "sha256Checksum":
            suggest = "sha256_checksum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecEnforceFileRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 sha256_checksum: Optional[str] = None):
        """
        :param str uri: Required. URI for this repository.
        :param str sha256_checksum: SHA256 checksum of the remote file.
        """
        pulumi.set(__self__, "uri", uri)
        if sha256_checksum is not None:
            pulumi.set(__self__, "sha256_checksum", sha256_checksum)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI for this repository.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="sha256Checksum")
    def sha256_checksum(self) -> Optional[str]:
        """
        SHA256 checksum of the remote file.
        """
        return pulumi.get(self, "sha256_checksum")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate(dict):
    """Output type for the validate step of an exec resource (interpreter, args, source file/script, output file path)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "outputFilePath":
            suggest = "output_file_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidate.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 interpreter: str,
                 args: Optional[Sequence[str]] = None,
                 file: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile'] = None,
                 output_file_path: Optional[str] = None,
                 script: Optional[str] = None):
        """
        :param str interpreter: Required. The script interpreter to use. Possible values: INTERPRETER_UNSPECIFIED, NONE, SHELL, POWERSHELL
        :param Sequence[str] args: Optional arguments to pass to the source during execution.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileArgs' file: A remote or local source.
        :param str output_file_path: Only recorded for enforce Exec. Path to an output file (that is created by this Exec) whose content will be recorded in OSPolicyResourceCompliance after a successful run. Absence or failure to read this file will result in this ExecResource being non-compliant. Output file size is limited to 100K bytes.
        :param str script: An inline script. The size of the script is limited to 1024 characters.
        """
        pulumi.set(__self__, "interpreter", interpreter)
        # Optional fields are only stored when provided, so absent keys stay absent.
        if args is not None:
            pulumi.set(__self__, "args", args)
        if file is not None:
            pulumi.set(__self__, "file", file)
        if output_file_path is not None:
            pulumi.set(__self__, "output_file_path", output_file_path)
        if script is not None:
            pulumi.set(__self__, "script", script)

    @property
    @pulumi.getter
    def interpreter(self) -> str:
        """
        Required. The script interpreter to use. Possible values: INTERPRETER_UNSPECIFIED, NONE, SHELL, POWERSHELL
        """
        return pulumi.get(self, "interpreter")

    @property
    @pulumi.getter
    def args(self) -> Optional[Sequence[str]]:
        """
        Optional arguments to pass to the source during execution.
        """
        return pulumi.get(self, "args")

    @property
    @pulumi.getter
    def file(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile']:
        """
        A remote or local source.
        """
        return pulumi.get(self, "file")

    @property
    @pulumi.getter(name="outputFilePath")
    def output_file_path(self) -> Optional[str]:
        """
        Only recorded for enforce Exec. Path to an output file (that is created by this Exec) whose content will be recorded in OSPolicyResourceCompliance after a successful run. Absence or failure to read this file will result in this ExecResource being non-compliant. Output file size is limited to 100K bytes.
        """
        return pulumi.get(self, "output_file_path")

    @property
    @pulumi.getter
    def script(self) -> Optional[str]:
        """
        An inline script. The size of the script is limited to 1024 characters.
        """
        return pulumi.get(self, "script")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile(dict):
    """Output type for the validate step's file source: one of gcs / local_path / remote, plus allow_insecure."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFile.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileGcs'] = None,
                 local_path: Optional[str] = None,
                 remote: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote'] = None):
        """
        :param bool allow_insecure: Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileGcsArgs' gcs: A Cloud Storage object.
        :param str local_path: A local path within the VM to use.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemoteArgs' remote: A generic remote file.
        """
        # All fields are optional; only provided ones are stored.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileGcs']:
        """
        A Cloud Storage object.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        A local path within the VM to use.
        """
        return pulumi.get(self, "local_path")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote']:
        """
        A generic remote file.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileGcs(dict):
    """Output type identifying a Cloud Storage object by bucket, object name, and optional generation."""

    def __init__(__self__, *,
                 bucket: str,
                 object: str,
                 generation: Optional[int] = None):
        # NOTE: `object` shadows the builtin; the parameter name is fixed by the provider schema.
        """
        :param str bucket: Required. Bucket of the Cloud Storage object.
        :param str object: Required. Name of the Cloud Storage object.
        :param int generation: Generation number of the Cloud Storage object.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "object", object)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Required. Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Required. Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Generation number of the Cloud Storage object.
        """
        return pulumi.get(self, "generation")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote(dict):
    """Output type for a generic remote file: a URI and an optional SHA256 checksum."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "sha256Checksum":
            suggest = "sha256_checksum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceExecValidateFileRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 sha256_checksum: Optional[str] = None):
        """
        :param str uri: Required. URI for this repository.
        :param str sha256_checksum: SHA256 checksum of the remote file.
        """
        pulumi.set(__self__, "uri", uri)
        if sha256_checksum is not None:
            pulumi.set(__self__, "sha256_checksum", sha256_checksum)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI for this repository.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="sha256Checksum")
    def sha256_checksum(self) -> Optional[str]:
        """
        SHA256 checksum of the remote file.
        """
        return pulumi.get(self, "sha256_checksum")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceFile(dict):
    """Output type for a managed file on the VM: path, desired state, and either inline content or a file source."""

    def __init__(__self__, *,
                 path: str,
                 state: str,
                 content: Optional[str] = None,
                 file: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile'] = None,
                 permissions: Optional[str] = None):
        """
        :param str path: Required. The absolute path of the file within the VM.
        :param str state: Required. Desired state of the file. Possible values: OS_POLICY_COMPLIANCE_STATE_UNSPECIFIED, COMPLIANT, NON_COMPLIANT, UNKNOWN, NO_OS_POLICIES_APPLICABLE
        :param str content: A a file with this content. The size of the content is limited to 1024 characters.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileArgs' file: A remote or local source.
        :param str permissions: -
               Consists of three octal digits which represent, in order, the permissions of the owner, group, and other users for the file (similarly to the numeric mode used in the linux chmod utility). Each digit represents a three bit number with the 4 bit corresponding to the read permissions, the 2 bit corresponds to the write bit, and the one bit corresponds to the execute permission. Default behavior is 755. Below are some examples of permissions and their associated values: read, write, and execute: 7 read and execute: 5 read and write: 6 read only: 4
        """
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "state", state)
        # Optional fields are only stored when provided, so absent keys stay absent.
        if content is not None:
            pulumi.set(__self__, "content", content)
        if file is not None:
            pulumi.set(__self__, "file", file)
        if permissions is not None:
            pulumi.set(__self__, "permissions", permissions)

    @property
    @pulumi.getter
    def path(self) -> str:
        """
        Required. The absolute path of the file within the VM.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def state(self) -> str:
        """
        Required. Desired state of the file. Possible values: OS_POLICY_COMPLIANCE_STATE_UNSPECIFIED, COMPLIANT, NON_COMPLIANT, UNKNOWN, NO_OS_POLICIES_APPLICABLE
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def content(self) -> Optional[str]:
        """
        A a file with this content. The size of the content is limited to 1024 characters.
        """
        return pulumi.get(self, "content")

    @property
    @pulumi.getter
    def file(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile']:
        """
        A remote or local source.
        """
        return pulumi.get(self, "file")

    @property
    @pulumi.getter
    def permissions(self) -> Optional[str]:
        """
        -
        Consists of three octal digits which represent, in order, the permissions of the owner, group, and other users for the file (similarly to the numeric mode used in the linux chmod utility). Each digit represents a three bit number with the 4 bit corresponding to the read permissions, the 2 bit corresponds to the write bit, and the one bit corresponds to the execute permission. Default behavior is 755. Below are some examples of permissions and their associated values: read, write, and execute: 7 read and execute: 5 read and write: 6 read only: 4
        """
        return pulumi.get(self, "permissions")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile(dict):
    """Output type for a file resource's source: one of gcs / local_path / remote, plus allow_insecure."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceFileFile.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileGcs'] = None,
                 local_path: Optional[str] = None,
                 remote: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote'] = None):
        """
        :param bool allow_insecure: Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileGcsArgs' gcs: A Cloud Storage object.
        :param str local_path: A local path within the VM to use.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemoteArgs' remote: A generic remote file.
        """
        # All fields are optional; only provided ones are stored.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileGcs']:
        """
        A Cloud Storage object.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        A local path within the VM to use.
        """
        return pulumi.get(self, "local_path")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote']:
        """
        A generic remote file.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileGcs(dict):
    """Output type identifying a Cloud Storage object by bucket, object name, and optional generation."""

    def __init__(__self__, *,
                 bucket: str,
                 object: str,
                 generation: Optional[int] = None):
        # NOTE: `object` shadows the builtin; the parameter name is fixed by the provider schema.
        """
        :param str bucket: Required. Bucket of the Cloud Storage object.
        :param str object: Required. Name of the Cloud Storage object.
        :param int generation: Generation number of the Cloud Storage object.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "object", object)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Required. Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Required. Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Generation number of the Cloud Storage object.
        """
        return pulumi.get(self, "generation")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote(dict):
    """Output type for a generic remote file: a URI and an optional SHA256 checksum."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "sha256Checksum":
            suggest = "sha256_checksum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceFileFileRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 sha256_checksum: Optional[str] = None):
        """
        :param str uri: Required. URI for this repository.
        :param str sha256_checksum: SHA256 checksum of the remote file.
        """
        pulumi.set(__self__, "uri", uri)
        if sha256_checksum is not None:
            pulumi.set(__self__, "sha256_checksum", sha256_checksum)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI for this repository.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="sha256Checksum")
    def sha256_checksum(self) -> Optional[str]:
        """
        SHA256 checksum of the remote file.
        """
        return pulumi.get(self, "sha256_checksum")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkg(dict):
    """Output type for a package resource: a required desired_state plus one package-manager-specific source (apt/deb/googet/msi/rpm/yum/zypper)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "desiredState":
            suggest = "desired_state"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkg. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkg.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkg.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 desired_state: str,
                 apt: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgApt'] = None,
                 deb: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb'] = None,
                 googet: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgGooget'] = None,
                 msi: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsi'] = None,
                 rpm: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm'] = None,
                 yum: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgYum'] = None,
                 zypper: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgZypper'] = None):
        """
        :param str desired_state: Required. The desired state the agent should maintain for this package. Possible values: DESIRED_STATE_UNSPECIFIED, INSTALLED, REMOVED
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgAptArgs' apt: An Apt Repository.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebArgs' deb: A deb package file.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgGoogetArgs' googet: A package managed by GooGet.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiArgs' msi: An MSI package.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmArgs' rpm: An rpm package file.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgYumArgs' yum: A Yum Repository.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgZypperArgs' zypper: A Zypper Repository.
        """
        pulumi.set(__self__, "desired_state", desired_state)
        # Package-manager-specific fields are all optional; only provided ones are stored.
        if apt is not None:
            pulumi.set(__self__, "apt", apt)
        if deb is not None:
            pulumi.set(__self__, "deb", deb)
        if googet is not None:
            pulumi.set(__self__, "googet", googet)
        if msi is not None:
            pulumi.set(__self__, "msi", msi)
        if rpm is not None:
            pulumi.set(__self__, "rpm", rpm)
        if yum is not None:
            pulumi.set(__self__, "yum", yum)
        if zypper is not None:
            pulumi.set(__self__, "zypper", zypper)

    @property
    @pulumi.getter(name="desiredState")
    def desired_state(self) -> str:
        """
        Required. The desired state the agent should maintain for this package. Possible values: DESIRED_STATE_UNSPECIFIED, INSTALLED, REMOVED
        """
        return pulumi.get(self, "desired_state")

    @property
    @pulumi.getter
    def apt(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgApt']:
        """
        An Apt Repository.
        """
        return pulumi.get(self, "apt")

    @property
    @pulumi.getter
    def deb(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb']:
        """
        A deb package file.
        """
        return pulumi.get(self, "deb")

    @property
    @pulumi.getter
    def googet(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgGooget']:
        """
        A package managed by GooGet.
        """
        return pulumi.get(self, "googet")

    @property
    @pulumi.getter
    def msi(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsi']:
        """
        An MSI package.
        """
        return pulumi.get(self, "msi")

    @property
    @pulumi.getter
    def rpm(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm']:
        """
        An rpm package file.
        """
        return pulumi.get(self, "rpm")

    @property
    @pulumi.getter
    def yum(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgYum']:
        """
        A Yum Repository.
        """
        return pulumi.get(self, "yum")

    @property
    @pulumi.getter
    def zypper(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgZypper']:
        """
        A Zypper Repository.
        """
        return pulumi.get(self, "zypper")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgApt(dict):
    """Output type for an apt-managed package, identified by name."""

    def __init__(__self__, *,
                 name: str):
        """
        :param str name: Required. The name of the repository.
        """
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Required. The name of the repository.
        """
        return pulumi.get(self, "name")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb(dict):
    """Output type for a deb package file: a required source plus an optional pull_deps flag.

    NOTE(review): the generated :param docstrings below describe rpm/yum/zypper
    commands even though this is the *deb* package type — this appears to be a
    codegen doc mix-up with the Rpm sibling type; verify against the upstream
    OS Config API docs before relying on them.
    """

    @staticmethod
    def __key_warning(key: str):
        # Warn when a raw camelCase wire key is used for dict access instead of
        # the snake_case property getter.
        suggest = None
        if key == "pullDeps":
            suggest = "pull_deps"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDeb.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 source: 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource',
                 pull_deps: Optional[bool] = None):
        """
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceArgs' source: Required. An rpm package.
        :param bool pull_deps: Whether dependencies should also be installed. - install when false: `rpm --upgrade --replacepkgs package.rpm` - install when true: `yum -y install package.rpm` or `zypper -y install package.rpm`
        """
        pulumi.set(__self__, "source", source)
        if pull_deps is not None:
            pulumi.set(__self__, "pull_deps", pull_deps)

    @property
    @pulumi.getter
    def source(self) -> 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource':
        """
        Required. An rpm package.
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter(name="pullDeps")
    def pull_deps(self) -> Optional[bool]:
        """
        Whether dependencies should also be installed. - install when false: `rpm --upgrade --replacepkgs package.rpm` - install when true: `yum -y install package.rpm` or `zypper -y install package.rpm`
        """
        return pulumi.get(self, "pull_deps")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource(dict):
    """Location of the deb package: a Cloud Storage object, a remote URI, or a
    path on the local filesystem (the fields are mutually alternative)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSource.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceGcs'] = None,
                 local_path: Optional[str] = None,
                 remote: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote'] = None):
        """
        :param bool allow_insecure: Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceGcsArgs' gcs: A Cloud Storage object.
        :param str local_path: A local path within the VM to use.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemoteArgs' remote: A generic remote file.
        """
        # Only set the keys that were actually provided, so absent optional
        # fields are omitted from the underlying dict entirely.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceGcs']:
        """
        A Cloud Storage object.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        A local path within the VM to use.
        """
        return pulumi.get(self, "local_path")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote']:
        """
        A generic remote file.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceGcs(dict):
    """A file stored as a Cloud Storage object."""

    def __init__(__self__, *,
                 bucket: str,
                 object: str,
                 generation: Optional[int] = None):
        """
        :param str bucket: Required. Bucket of the Cloud Storage object.
        :param str object: Required. Name of the Cloud Storage object.
        :param int generation: Generation number of the Cloud Storage object.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "object", object)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Required. Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Required. Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Generation number of the Cloud Storage object.
        """
        return pulumi.get(self, "generation")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote(dict):
    """A file fetched from a remote URI."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "sha256Checksum":
            suggest = "sha256_checksum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgDebSourceRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 sha256_checksum: Optional[str] = None):
        """
        :param str uri: Required. URI from which to fetch the file.
        :param str sha256_checksum: SHA256 checksum of the remote file.
        """
        pulumi.set(__self__, "uri", uri)
        if sha256_checksum is not None:
            pulumi.set(__self__, "sha256_checksum", sha256_checksum)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI from which to fetch the file.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="sha256Checksum")
    def sha256_checksum(self) -> Optional[str]:
        """
        SHA256 checksum of the remote file.
        """
        return pulumi.get(self, "sha256_checksum")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgGooget(dict):
    """A package managed by the GooGet package manager."""

    def __init__(__self__, *,
                 name: str):
        """
        :param str name: Required. Package name.
        """
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Required. Package name.
        """
        return pulumi.get(self, "name")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsi(dict):
    """An MSI package to be installed on a Windows VM."""

    def __init__(__self__, *,
                 source: 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource',
                 properties: Optional[Sequence[str]] = None):
        """
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceArgs' source: Required. The MSI package.
        :param Sequence[str] properties: Additional properties to use during installation. This should be in the format of Property=Setting. Appended to the defaults of `ACTION=INSTALL REBOOT=ReallySuppress`.
        """
        pulumi.set(__self__, "source", source)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)

    @property
    @pulumi.getter
    def source(self) -> 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource':
        """
        Required. The MSI package.
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter
    def properties(self) -> Optional[Sequence[str]]:
        """
        Additional properties to use during installation. This should be in the format of Property=Setting. Appended to the defaults of `ACTION=INSTALL REBOOT=ReallySuppress`.
        """
        return pulumi.get(self, "properties")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource(dict):
    """Location of the MSI package: a Cloud Storage object, a remote URI, or a
    path on the local filesystem (the fields are mutually alternative)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSource.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceGcs'] = None,
                 local_path: Optional[str] = None,
                 remote: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote'] = None):
        """
        :param bool allow_insecure: Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceGcsArgs' gcs: A Cloud Storage object.
        :param str local_path: A local path within the VM to use.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemoteArgs' remote: A generic remote file.
        """
        # Only set the keys that were actually provided, so absent optional
        # fields are omitted from the underlying dict entirely.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceGcs']:
        """
        A Cloud Storage object.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        A local path within the VM to use.
        """
        return pulumi.get(self, "local_path")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote']:
        """
        A generic remote file.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceGcs(dict):
    """A file stored as a Cloud Storage object."""

    def __init__(__self__, *,
                 bucket: str,
                 object: str,
                 generation: Optional[int] = None):
        """
        :param str bucket: Required. Bucket of the Cloud Storage object.
        :param str object: Required. Name of the Cloud Storage object.
        :param int generation: Generation number of the Cloud Storage object.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "object", object)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Required. Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Required. Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Generation number of the Cloud Storage object.
        """
        return pulumi.get(self, "generation")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote(dict):
    """A file fetched from a remote URI."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "sha256Checksum":
            suggest = "sha256_checksum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgMsiSourceRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 sha256_checksum: Optional[str] = None):
        """
        :param str uri: Required. URI from which to fetch the file.
        :param str sha256_checksum: SHA256 checksum of the remote file.
        """
        pulumi.set(__self__, "uri", uri)
        if sha256_checksum is not None:
            pulumi.set(__self__, "sha256_checksum", sha256_checksum)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI from which to fetch the file.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="sha256Checksum")
    def sha256_checksum(self) -> Optional[str]:
        """
        SHA256 checksum of the remote file.
        """
        return pulumi.get(self, "sha256_checksum")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm(dict):
    """An rpm package file to be installed on the VM."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "pullDeps":
            suggest = "pull_deps"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpm.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 source: 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource',
                 pull_deps: Optional[bool] = None):
        """
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceArgs' source: Required. An rpm package.
        :param bool pull_deps: Whether dependencies should also be installed. - install when false: `rpm --upgrade --replacepkgs package.rpm` - install when true: `yum -y install package.rpm` or `zypper -y install package.rpm`
        """
        pulumi.set(__self__, "source", source)
        if pull_deps is not None:
            pulumi.set(__self__, "pull_deps", pull_deps)

    @property
    @pulumi.getter
    def source(self) -> 'outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource':
        """
        Required. An rpm package.
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter(name="pullDeps")
    def pull_deps(self) -> Optional[bool]:
        """
        Whether dependencies should also be installed. - install when false: `rpm --upgrade --replacepkgs package.rpm` - install when true: `yum -y install package.rpm` or `zypper -y install package.rpm`
        """
        return pulumi.get(self, "pull_deps")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource(dict):
    """Location of the rpm package: a Cloud Storage object, a remote URI, or a
    path on the local filesystem (the fields are mutually alternative)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "allowInsecure":
            suggest = "allow_insecure"
        elif key == "localPath":
            suggest = "local_path"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSource.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allow_insecure: Optional[bool] = None,
                 gcs: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceGcs'] = None,
                 local_path: Optional[str] = None,
                 remote: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote'] = None):
        """
        :param bool allow_insecure: Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceGcsArgs' gcs: A Cloud Storage object.
        :param str local_path: A local path within the VM to use.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemoteArgs' remote: A generic remote file.
        """
        # Only set the keys that were actually provided, so absent optional
        # fields are omitted from the underlying dict entirely.
        if allow_insecure is not None:
            pulumi.set(__self__, "allow_insecure", allow_insecure)
        if gcs is not None:
            pulumi.set(__self__, "gcs", gcs)
        if local_path is not None:
            pulumi.set(__self__, "local_path", local_path)
        if remote is not None:
            pulumi.set(__self__, "remote", remote)

    @property
    @pulumi.getter(name="allowInsecure")
    def allow_insecure(self) -> Optional[bool]:
        """
        Defaults to false. When false, files are subject to validations based on the file type: Remote: A checksum must be specified. Cloud Storage: An object generation number must be specified.
        """
        return pulumi.get(self, "allow_insecure")

    @property
    @pulumi.getter
    def gcs(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceGcs']:
        """
        A Cloud Storage object.
        """
        return pulumi.get(self, "gcs")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        A local path within the VM to use.
        """
        return pulumi.get(self, "local_path")

    @property
    @pulumi.getter
    def remote(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote']:
        """
        A generic remote file.
        """
        return pulumi.get(self, "remote")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceGcs(dict):
    """A file stored as a Cloud Storage object."""

    def __init__(__self__, *,
                 bucket: str,
                 object: str,
                 generation: Optional[int] = None):
        """
        :param str bucket: Required. Bucket of the Cloud Storage object.
        :param str object: Required. Name of the Cloud Storage object.
        :param int generation: Generation number of the Cloud Storage object.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "object", object)
        if generation is not None:
            pulumi.set(__self__, "generation", generation)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Required. Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Required. Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")

    @property
    @pulumi.getter
    def generation(self) -> Optional[int]:
        """
        Generation number of the Cloud Storage object.
        """
        return pulumi.get(self, "generation")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote(dict):
    """A file fetched from a remote URI."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "sha256Checksum":
            suggest = "sha256_checksum"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourcePkgRpmSourceRemote.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 sha256_checksum: Optional[str] = None):
        """
        :param str uri: Required. URI from which to fetch the file.
        :param str sha256_checksum: SHA256 checksum of the remote file.
        """
        pulumi.set(__self__, "uri", uri)
        if sha256_checksum is not None:
            pulumi.set(__self__, "sha256_checksum", sha256_checksum)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI from which to fetch the file.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="sha256Checksum")
    def sha256_checksum(self) -> Optional[str]:
        """
        SHA256 checksum of the remote file.
        """
        return pulumi.get(self, "sha256_checksum")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgYum(dict):
    """A package managed by the yum package manager."""

    def __init__(__self__, *,
                 name: str):
        """
        :param str name: Required. Package name.
        """
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Required. Package name.
        """
        return pulumi.get(self, "name")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourcePkgZypper(dict):
    """A package managed by the zypper package manager."""

    def __init__(__self__, *,
                 name: str):
        """
        :param str name: Required. Package name.
        """
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Required. Package name.
        """
        return pulumi.get(self, "name")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceRepository(dict):
    """A package repository resource. All fields are optional here; the one
    that is set selects the repository type (apt, goo, yum, or zypper)."""

    def __init__(__self__, *,
                 apt: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt'] = None,
                 goo: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryGoo'] = None,
                 yum: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum'] = None,
                 zypper: Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper'] = None):
        """
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryAptArgs' apt: An Apt Repository.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryGooArgs' goo: A Goo Repository.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYumArgs' yum: A Yum Repository.
        :param 'OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypperArgs' zypper: A Zypper Repository.
        """
        # Only set the keys that were actually provided, so absent optional
        # fields are omitted from the underlying dict entirely.
        if apt is not None:
            pulumi.set(__self__, "apt", apt)
        if goo is not None:
            pulumi.set(__self__, "goo", goo)
        if yum is not None:
            pulumi.set(__self__, "yum", yum)
        if zypper is not None:
            pulumi.set(__self__, "zypper", zypper)

    @property
    @pulumi.getter
    def apt(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt']:
        """
        An Apt Repository.
        """
        return pulumi.get(self, "apt")

    @property
    @pulumi.getter
    def goo(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryGoo']:
        """
        A Goo Repository.
        """
        return pulumi.get(self, "goo")

    @property
    @pulumi.getter
    def yum(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum']:
        """
        A Yum Repository.
        """
        return pulumi.get(self, "yum")

    @property
    @pulumi.getter
    def zypper(self) -> Optional['outputs.OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper']:
        """
        A Zypper Repository.
        """
        return pulumi.get(self, "zypper")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt(dict):
    """Represents a single apt package repository."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "archiveType":
            suggest = "archive_type"
        elif key == "gpgKey":
            suggest = "gpg_key"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryApt.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 archive_type: str,
                 components: Sequence[str],
                 distribution: str,
                 uri: str,
                 gpg_key: Optional[str] = None):
        """
        :param str archive_type: Required. Type of archive files in this repository. Possible values: ARCHIVE_TYPE_UNSPECIFIED, DEB, DEB_SRC
        :param Sequence[str] components: Required. List of components for this repository. Must contain at least one item.
        :param str distribution: Required. Distribution of this repository.
        :param str uri: Required. URI for this repository.
        :param str gpg_key: URI of the key file for this repository. The agent maintains a keyring at `/etc/apt/trusted.gpg.d/osconfig_agent_managed.gpg`.
        """
        pulumi.set(__self__, "archive_type", archive_type)
        pulumi.set(__self__, "components", components)
        pulumi.set(__self__, "distribution", distribution)
        pulumi.set(__self__, "uri", uri)
        if gpg_key is not None:
            pulumi.set(__self__, "gpg_key", gpg_key)

    @property
    @pulumi.getter(name="archiveType")
    def archive_type(self) -> str:
        """
        Required. Type of archive files in this repository. Possible values: ARCHIVE_TYPE_UNSPECIFIED, DEB, DEB_SRC
        """
        return pulumi.get(self, "archive_type")

    @property
    @pulumi.getter
    def components(self) -> Sequence[str]:
        """
        Required. List of components for this repository. Must contain at least one item.
        """
        return pulumi.get(self, "components")

    @property
    @pulumi.getter
    def distribution(self) -> str:
        """
        Required. Distribution of this repository.
        """
        return pulumi.get(self, "distribution")

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        Required. URI for this repository.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="gpgKey")
    def gpg_key(self) -> Optional[str]:
        """
        URI of the key file for this repository. The agent maintains a keyring at `/etc/apt/trusted.gpg.d/osconfig_agent_managed.gpg`.
        """
        return pulumi.get(self, "gpg_key")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryGoo(dict):
    """Represents a Goo package repository, identified by name and URL."""

    def __init__(__self__, *,
                 name: str,
                 url: str):
        """
        :param str name: Required. The name of the repository.
        :param str url: Required. The url of the repository.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Required. The name of the repository.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def url(self) -> str:
        """
        Required. The url of the repository.
        """
        return pulumi.get(self, "url")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum(dict):
    """Represents a single yum package repository."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "baseUrl":
            suggest = "base_url"
        elif key == "displayName":
            suggest = "display_name"
        elif key == "gpgKeys":
            suggest = "gpg_keys"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryYum.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 base_url: str,
                 id: str,
                 display_name: Optional[str] = None,
                 gpg_keys: Optional[Sequence[str]] = None):
        """
        :param str base_url: Required. The location of the repository directory.
        :param str id: Required. A one word, unique name for this repository. This is the `repo id` in the yum config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        :param str display_name: The display name of the repository.
        :param Sequence[str] gpg_keys: URIs of GPG keys.
        """
        pulumi.set(__self__, "base_url", base_url)
        pulumi.set(__self__, "id", id)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if gpg_keys is not None:
            pulumi.set(__self__, "gpg_keys", gpg_keys)

    @property
    @pulumi.getter(name="baseUrl")
    def base_url(self) -> str:
        """
        Required. The location of the repository directory.
        """
        return pulumi.get(self, "base_url")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Required. A one word, unique name for this repository. This is the `repo id` in the yum config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        The display name of the repository.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="gpgKeys")
    def gpg_keys(self) -> Optional[Sequence[str]]:
        """
        URIs of GPG keys.
        """
        return pulumi.get(self, "gpg_keys")
@pulumi.output_type
class OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper(dict):
    """Represents a single zypper package repository."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "baseUrl":
            suggest = "base_url"
        elif key == "displayName":
            suggest = "display_name"
        elif key == "gpgKeys":
            suggest = "gpg_keys"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentOsPolicyResourceGroupResourceRepositoryZypper.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 base_url: str,
                 id: str,
                 display_name: Optional[str] = None,
                 gpg_keys: Optional[Sequence[str]] = None):
        """
        :param str base_url: Required. The location of the repository directory.
        :param str id: Required. A one word, unique name for this repository. This is the `repo id` in the zypper config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        :param str display_name: The display name of the repository.
        :param Sequence[str] gpg_keys: URIs of GPG keys.
        """
        pulumi.set(__self__, "base_url", base_url)
        pulumi.set(__self__, "id", id)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if gpg_keys is not None:
            pulumi.set(__self__, "gpg_keys", gpg_keys)

    @property
    @pulumi.getter(name="baseUrl")
    def base_url(self) -> str:
        """
        Required. The location of the repository directory.
        """
        return pulumi.get(self, "base_url")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Required. A one word, unique name for this repository. This is the `repo id` in the zypper config file and also the `display_name` if `display_name` is omitted. This id is also used as the unique identifier when checking for GuestPolicy conflicts.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        The display name of the repository.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="gpgKeys")
    def gpg_keys(self) -> Optional[Sequence[str]]:
        """
        URIs of GPG keys.
        """
        return pulumi.get(self, "gpg_keys")
@pulumi.output_type
class OsPolicyAssignmentRollout(dict):
    """Rollout parameters controlling how the OS policy assignment is applied
    across the targeted VM fleet."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is read directly instead of through
        # the snake_case property getter.
        suggest = None
        if key == "disruptionBudget":
            suggest = "disruption_budget"
        elif key == "minWaitDuration":
            suggest = "min_wait_duration"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in OsPolicyAssignmentRollout. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        OsPolicyAssignmentRollout.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        OsPolicyAssignmentRollout.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 disruption_budget: 'outputs.OsPolicyAssignmentRolloutDisruptionBudget',
                 min_wait_duration: str):
        """
        :param 'OsPolicyAssignmentRolloutDisruptionBudgetArgs' disruption_budget: Required. The maximum number (or percentage) of VMs per zone to disrupt at any given moment.
        :param str min_wait_duration: Required. This determines the minimum duration of time to wait after the configuration changes are applied through the current rollout. A VM continues to count towards the `disruption_budget` at least until this duration of time has passed after configuration changes are applied.
        """
        pulumi.set(__self__, "disruption_budget", disruption_budget)
        pulumi.set(__self__, "min_wait_duration", min_wait_duration)

    @property
    @pulumi.getter(name="disruptionBudget")
    def disruption_budget(self) -> 'outputs.OsPolicyAssignmentRolloutDisruptionBudget':
        """
        Required. The maximum number (or percentage) of VMs per zone to disrupt at any given moment.
        """
        return pulumi.get(self, "disruption_budget")

    @property
    @pulumi.getter(name="minWaitDuration")
    def min_wait_duration(self) -> str:
        """
        Required. This determines the minimum duration of time to wait after the configuration changes are applied through the current rollout. A VM continues to count towards the `disruption_budget` at least until this duration of time has passed after configuration changes are applied.
        """
        return pulumi.get(self, "min_wait_duration")
@pulumi.output_type
class OsPolicyAssignmentRolloutDisruptionBudget(dict):
    """A budget expressed either as a fixed count or as a percentage of a
    reference value (one of the two fields is expected to be set)."""

    def __init__(__self__, *,
                 fixed: Optional[int] = None,
                 percent: Optional[int] = None):
        """
        :param int fixed: Specifies a fixed value.
        :param int percent: Specifies the relative value defined as a percentage, which will be multiplied by a reference value.
        """
        # Only set the keys that were actually provided, so absent optional
        # fields are omitted from the underlying dict entirely.
        if fixed is not None:
            pulumi.set(__self__, "fixed", fixed)
        if percent is not None:
            pulumi.set(__self__, "percent", percent)

    @property
    @pulumi.getter
    def fixed(self) -> Optional[int]:
        """
        Specifies a fixed value.
        """
        return pulumi.get(self, "fixed")

    @property
    @pulumi.getter
    def percent(self) -> Optional[int]:
        """
        Specifies the relative value defined as a percentage, which will be multiplied by a reference value.
        """
        return pulumi.get(self, "percent")
@pulumi.output_type
class PatchDeploymentInstanceFilter(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "groupLabels": "group_labels",
            "instanceNamePrefixes": "instance_name_prefixes",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentInstanceFilter. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentInstanceFilter.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentInstanceFilter.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 all: Optional[bool] = None,
                 group_labels: Optional[Sequence['outputs.PatchDeploymentInstanceFilterGroupLabel']] = None,
                 instance_name_prefixes: Optional[Sequence[str]] = None,
                 instances: Optional[Sequence[str]] = None,
                 zones: Optional[Sequence[str]] = None):
        """
        :param bool all: Target all VM instances in the project. If true, no other criteria is permitted.
        :param Sequence['PatchDeploymentInstanceFilterGroupLabelArgs'] group_labels: Targets VM instances matching ANY of these GroupLabels. This allows targeting of disparate groups of VM instances.
               Structure is documented below.
        :param Sequence[str] instance_name_prefixes: Targets VMs whose name starts with one of these prefixes. Similar to labels, this is another way to group
               VMs when targeting configs, for example prefix="prod-".
        :param Sequence[str] instances: Targets any of the VM instances specified. Instances are specified by their URI in the `form zones/{{zone}}/instances/{{instance_name}}`,
               `projects/{{project_id}}/zones/{{zone}}/instances/{{instance_name}}`, or
               `https://www.googleapis.com/compute/v1/projects/{{project_id}}/zones/{{zone}}/instances/{{instance_name}}`
        :param Sequence[str] zones: Targets VM instances in ANY of these zones. Leave empty to target VM instances in any zone.
        """
        # Store only the fields the caller actually provided.
        for prop_name, prop_value in (
            ("all", all),
            ("group_labels", group_labels),
            ("instance_name_prefixes", instance_name_prefixes),
            ("instances", instances),
            ("zones", zones),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        """
        Target all VM instances in the project. If true, no other criteria is permitted.
        """
        return pulumi.get(self, "all")

    @property
    @pulumi.getter(name="groupLabels")
    def group_labels(self) -> Optional[Sequence['outputs.PatchDeploymentInstanceFilterGroupLabel']]:
        """
        Targets VM instances matching ANY of these GroupLabels. This allows targeting of disparate groups of VM instances.
        Structure is documented below.
        """
        return pulumi.get(self, "group_labels")

    @property
    @pulumi.getter(name="instanceNamePrefixes")
    def instance_name_prefixes(self) -> Optional[Sequence[str]]:
        """
        Targets VMs whose name starts with one of these prefixes. Similar to labels, this is another way to group
        VMs when targeting configs, for example prefix="prod-".
        """
        return pulumi.get(self, "instance_name_prefixes")

    @property
    @pulumi.getter
    def instances(self) -> Optional[Sequence[str]]:
        """
        Targets any of the VM instances specified. Instances are specified by their URI in the `form zones/{{zone}}/instances/{{instance_name}}`,
        `projects/{{project_id}}/zones/{{zone}}/instances/{{instance_name}}`, or
        `https://www.googleapis.com/compute/v1/projects/{{project_id}}/zones/{{zone}}/instances/{{instance_name}}`
        """
        return pulumi.get(self, "instances")

    @property
    @pulumi.getter
    def zones(self) -> Optional[Sequence[str]]:
        """
        Targets VM instances in ANY of these zones. Leave empty to target VM instances in any zone.
        """
        return pulumi.get(self, "zones")
@pulumi.output_type
class PatchDeploymentInstanceFilterGroupLabel(dict):
    def __init__(__self__, *,
                 labels: Mapping[str, str]):
        """
        :param Mapping[str, str] labels: Compute Engine instance labels that must be present for a VM instance to be targeted by this filter
        """
        # `labels` is required, so it is stored unconditionally.
        pulumi.set(__self__, "labels", labels)

    @property
    @pulumi.getter
    def labels(self) -> Mapping[str, str]:
        """
        Compute Engine instance labels that must be present for a VM instance to be targeted by this filter
        """
        return pulumi.get(self, "labels")
@pulumi.output_type
class PatchDeploymentOneTimeSchedule(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the
        # snake_case property getter.
        renamed = {"executeTime": "execute_time"}.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentOneTimeSchedule. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentOneTimeSchedule.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentOneTimeSchedule.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 execute_time: str):
        """
        :param str execute_time: The desired patch job execution time. A timestamp in RFC3339 UTC "Zulu" format,
               accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        """
        pulumi.set(__self__, "execute_time", execute_time)

    @property
    @pulumi.getter(name="executeTime")
    def execute_time(self) -> str:
        """
        The desired patch job execution time. A timestamp in RFC3339 UTC "Zulu" format,
        accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "execute_time")
@pulumi.output_type
class PatchDeploymentPatchConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "postStep": "post_step",
            "preStep": "pre_step",
            "rebootConfig": "reboot_config",
            "windowsUpdate": "windows_update",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 apt: Optional['outputs.PatchDeploymentPatchConfigApt'] = None,
                 goo: Optional['outputs.PatchDeploymentPatchConfigGoo'] = None,
                 post_step: Optional['outputs.PatchDeploymentPatchConfigPostStep'] = None,
                 pre_step: Optional['outputs.PatchDeploymentPatchConfigPreStep'] = None,
                 reboot_config: Optional[str] = None,
                 windows_update: Optional['outputs.PatchDeploymentPatchConfigWindowsUpdate'] = None,
                 yum: Optional['outputs.PatchDeploymentPatchConfigYum'] = None,
                 zypper: Optional['outputs.PatchDeploymentPatchConfigZypper'] = None):
        """
        :param 'PatchDeploymentPatchConfigAptArgs' apt: Apt update settings. Use this setting to override the default apt patch rules.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigGooArgs' goo: goo update settings. Use this setting to override the default goo patch rules.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigPostStepArgs' post_step: The ExecStep to run after the patch update.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigPreStepArgs' pre_step: The ExecStep to run before the patch update.
               Structure is documented below.
        :param str reboot_config: Post-patch reboot settings.
               Possible values are `DEFAULT`, `ALWAYS`, and `NEVER`.
        :param 'PatchDeploymentPatchConfigWindowsUpdateArgs' windows_update: Windows update settings. Use this setting to override the default Windows patch rules.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigYumArgs' yum: Yum update settings. Use this setting to override the default yum patch rules.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigZypperArgs' zypper: zypper update settings. Use this setting to override the default zypper patch rules.
               Structure is documented below.
        """
        # Store only the per-package-manager / step settings the caller
        # actually provided.
        for prop_name, prop_value in (
            ("apt", apt),
            ("goo", goo),
            ("post_step", post_step),
            ("pre_step", pre_step),
            ("reboot_config", reboot_config),
            ("windows_update", windows_update),
            ("yum", yum),
            ("zypper", zypper),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def apt(self) -> Optional['outputs.PatchDeploymentPatchConfigApt']:
        """
        Apt update settings. Use this setting to override the default apt patch rules.
        Structure is documented below.
        """
        return pulumi.get(self, "apt")

    @property
    @pulumi.getter
    def goo(self) -> Optional['outputs.PatchDeploymentPatchConfigGoo']:
        """
        goo update settings. Use this setting to override the default goo patch rules.
        Structure is documented below.
        """
        return pulumi.get(self, "goo")

    @property
    @pulumi.getter(name="postStep")
    def post_step(self) -> Optional['outputs.PatchDeploymentPatchConfigPostStep']:
        """
        The ExecStep to run after the patch update.
        Structure is documented below.
        """
        return pulumi.get(self, "post_step")

    @property
    @pulumi.getter(name="preStep")
    def pre_step(self) -> Optional['outputs.PatchDeploymentPatchConfigPreStep']:
        """
        The ExecStep to run before the patch update.
        Structure is documented below.
        """
        return pulumi.get(self, "pre_step")

    @property
    @pulumi.getter(name="rebootConfig")
    def reboot_config(self) -> Optional[str]:
        """
        Post-patch reboot settings.
        Possible values are `DEFAULT`, `ALWAYS`, and `NEVER`.
        """
        return pulumi.get(self, "reboot_config")

    @property
    @pulumi.getter(name="windowsUpdate")
    def windows_update(self) -> Optional['outputs.PatchDeploymentPatchConfigWindowsUpdate']:
        """
        Windows update settings. Use this setting to override the default Windows patch rules.
        Structure is documented below.
        """
        return pulumi.get(self, "windows_update")

    @property
    @pulumi.getter
    def yum(self) -> Optional['outputs.PatchDeploymentPatchConfigYum']:
        """
        Yum update settings. Use this setting to override the default yum patch rules.
        Structure is documented below.
        """
        return pulumi.get(self, "yum")

    @property
    @pulumi.getter
    def zypper(self) -> Optional['outputs.PatchDeploymentPatchConfigZypper']:
        """
        zypper update settings. Use this setting to override the default zypper patch rules.
        Structure is documented below.
        """
        return pulumi.get(self, "zypper")
@pulumi.output_type
class PatchDeploymentPatchConfigApt(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key onto its snake_case property getter
        # and warn when a caller uses the wire form directly.
        renamed = {"exclusivePackages": "exclusive_packages"}.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigApt. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigApt.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigApt.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 excludes: Optional[Sequence[str]] = None,
                 exclusive_packages: Optional[Sequence[str]] = None,
                 type: Optional[str] = None):
        """
        :param Sequence[str] excludes: List of packages to exclude from update. These packages will be excluded.
        :param Sequence[str] exclusive_packages: An exclusive list of packages to be updated. These are the only packages that will be updated.
               If these packages are not installed, they will be ignored. This field cannot be specified with
               any other patch configuration fields.
        :param str type: By changing the type to DIST, the patching is performed using apt-get dist-upgrade instead.
               Possible values are `DIST` and `UPGRADE`.
        """
        # Store only the fields the caller actually provided.
        for prop_name, prop_value in (
            ("excludes", excludes),
            ("exclusive_packages", exclusive_packages),
            ("type", type),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def excludes(self) -> Optional[Sequence[str]]:
        """
        List of packages to exclude from update. These packages will be excluded.
        """
        return pulumi.get(self, "excludes")

    @property
    @pulumi.getter(name="exclusivePackages")
    def exclusive_packages(self) -> Optional[Sequence[str]]:
        """
        An exclusive list of packages to be updated. These are the only packages that will be updated.
        If these packages are not installed, they will be ignored. This field cannot be specified with
        any other patch configuration fields.
        """
        return pulumi.get(self, "exclusive_packages")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        By changing the type to DIST, the patching is performed using apt-get dist-upgrade instead.
        Possible values are `DIST` and `UPGRADE`.
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class PatchDeploymentPatchConfigGoo(dict):
    def __init__(__self__, *,
                 enabled: bool):
        """
        :param bool enabled: goo update settings. Use this setting to override the default goo patch rules.
        """
        # `enabled` is required, so it is stored unconditionally.
        pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """
        goo update settings. Use this setting to override the default goo patch rules.
        """
        return pulumi.get(self, "enabled")
@pulumi.output_type
class PatchDeploymentPatchConfigPostStep(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "linuxExecStepConfig": "linux_exec_step_config",
            "windowsExecStepConfig": "windows_exec_step_config",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPostStep. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPostStep.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPostStep.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 linux_exec_step_config: Optional['outputs.PatchDeploymentPatchConfigPostStepLinuxExecStepConfig'] = None,
                 windows_exec_step_config: Optional['outputs.PatchDeploymentPatchConfigPostStepWindowsExecStepConfig'] = None):
        """
        :param 'PatchDeploymentPatchConfigPostStepLinuxExecStepConfigArgs' linux_exec_step_config: The ExecStepConfig for all Linux VMs targeted by the PatchJob.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigPostStepWindowsExecStepConfigArgs' windows_exec_step_config: The ExecStepConfig for all Windows VMs targeted by the PatchJob.
               Structure is documented below.
        """
        # Store only the per-OS step configs the caller actually provided.
        for prop_name, prop_value in (
            ("linux_exec_step_config", linux_exec_step_config),
            ("windows_exec_step_config", windows_exec_step_config),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="linuxExecStepConfig")
    def linux_exec_step_config(self) -> Optional['outputs.PatchDeploymentPatchConfigPostStepLinuxExecStepConfig']:
        """
        The ExecStepConfig for all Linux VMs targeted by the PatchJob.
        Structure is documented below.
        """
        return pulumi.get(self, "linux_exec_step_config")

    @property
    @pulumi.getter(name="windowsExecStepConfig")
    def windows_exec_step_config(self) -> Optional['outputs.PatchDeploymentPatchConfigPostStepWindowsExecStepConfig']:
        """
        The ExecStepConfig for all Windows VMs targeted by the PatchJob.
        Structure is documented below.
        """
        return pulumi.get(self, "windows_exec_step_config")
@pulumi.output_type
class PatchDeploymentPatchConfigPostStepLinuxExecStepConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "allowedSuccessCodes": "allowed_success_codes",
            "gcsObject": "gcs_object",
            "localPath": "local_path",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPostStepLinuxExecStepConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPostStepLinuxExecStepConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPostStepLinuxExecStepConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_success_codes: Optional[Sequence[int]] = None,
                 gcs_object: Optional['outputs.PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject'] = None,
                 interpreter: Optional[str] = None,
                 local_path: Optional[str] = None):
        """
        :param Sequence[int] allowed_success_codes: Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        :param 'PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObjectArgs' gcs_object: A Cloud Storage object containing the executable.
               Structure is documented below.
        :param str interpreter: The script interpreter to use to run the script. If no interpreter is specified the script will
               be executed directly, which will likely only succeed for scripts with shebang lines.
               Possible values are `SHELL` and `POWERSHELL`.
        :param str local_path: An absolute path to the executable on the VM.
        """
        # Store only the fields the caller actually provided.
        for prop_name, prop_value in (
            ("allowed_success_codes", allowed_success_codes),
            ("gcs_object", gcs_object),
            ("interpreter", interpreter),
            ("local_path", local_path),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="allowedSuccessCodes")
    def allowed_success_codes(self) -> Optional[Sequence[int]]:
        """
        Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        """
        return pulumi.get(self, "allowed_success_codes")

    @property
    @pulumi.getter(name="gcsObject")
    def gcs_object(self) -> Optional['outputs.PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject']:
        """
        A Cloud Storage object containing the executable.
        Structure is documented below.
        """
        return pulumi.get(self, "gcs_object")

    @property
    @pulumi.getter
    def interpreter(self) -> Optional[str]:
        """
        The script interpreter to use to run the script. If no interpreter is specified the script will
        be executed directly, which will likely only succeed for scripts with shebang lines.
        Possible values are `SHELL` and `POWERSHELL`.
        """
        return pulumi.get(self, "interpreter")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        An absolute path to the executable on the VM.
        """
        return pulumi.get(self, "local_path")
@pulumi.output_type
class PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the
        # snake_case property getter.
        renamed = {"generationNumber": "generation_number"}.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPostStepLinuxExecStepConfigGcsObject.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 bucket: str,
                 generation_number: str,
                 object: str):
        """
        :param str bucket: Bucket of the Cloud Storage object.
        :param str generation_number: Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        :param str object: Name of the Cloud Storage object.
        """
        # All three fields are required, so they are stored unconditionally.
        for prop_name, prop_value in (
            ("bucket", bucket),
            ("generation_number", generation_number),
            ("object", object),
        ):
            pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="generationNumber")
    def generation_number(self) -> str:
        """
        Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        """
        return pulumi.get(self, "generation_number")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")
@pulumi.output_type
class PatchDeploymentPatchConfigPostStepWindowsExecStepConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "allowedSuccessCodes": "allowed_success_codes",
            "gcsObject": "gcs_object",
            "localPath": "local_path",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPostStepWindowsExecStepConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPostStepWindowsExecStepConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPostStepWindowsExecStepConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_success_codes: Optional[Sequence[int]] = None,
                 gcs_object: Optional['outputs.PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject'] = None,
                 interpreter: Optional[str] = None,
                 local_path: Optional[str] = None):
        """
        :param Sequence[int] allowed_success_codes: Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        :param 'PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObjectArgs' gcs_object: A Cloud Storage object containing the executable.
               Structure is documented below.
        :param str interpreter: The script interpreter to use to run the script. If no interpreter is specified the script will
               be executed directly, which will likely only succeed for scripts with shebang lines.
               Possible values are `SHELL` and `POWERSHELL`.
        :param str local_path: An absolute path to the executable on the VM.
        """
        # Store only the fields the caller actually provided.
        for prop_name, prop_value in (
            ("allowed_success_codes", allowed_success_codes),
            ("gcs_object", gcs_object),
            ("interpreter", interpreter),
            ("local_path", local_path),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="allowedSuccessCodes")
    def allowed_success_codes(self) -> Optional[Sequence[int]]:
        """
        Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        """
        return pulumi.get(self, "allowed_success_codes")

    @property
    @pulumi.getter(name="gcsObject")
    def gcs_object(self) -> Optional['outputs.PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject']:
        """
        A Cloud Storage object containing the executable.
        Structure is documented below.
        """
        return pulumi.get(self, "gcs_object")

    @property
    @pulumi.getter
    def interpreter(self) -> Optional[str]:
        """
        The script interpreter to use to run the script. If no interpreter is specified the script will
        be executed directly, which will likely only succeed for scripts with shebang lines.
        Possible values are `SHELL` and `POWERSHELL`.
        """
        return pulumi.get(self, "interpreter")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        An absolute path to the executable on the VM.
        """
        return pulumi.get(self, "local_path")
@pulumi.output_type
class PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the
        # snake_case property getter.
        renamed = {"generationNumber": "generation_number"}.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPostStepWindowsExecStepConfigGcsObject.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 bucket: str,
                 generation_number: str,
                 object: str):
        """
        :param str bucket: Bucket of the Cloud Storage object.
        :param str generation_number: Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        :param str object: Name of the Cloud Storage object.
        """
        # All three fields are required, so they are stored unconditionally.
        for prop_name, prop_value in (
            ("bucket", bucket),
            ("generation_number", generation_number),
            ("object", object),
        ):
            pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="generationNumber")
    def generation_number(self) -> str:
        """
        Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        """
        return pulumi.get(self, "generation_number")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")
@pulumi.output_type
class PatchDeploymentPatchConfigPreStep(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "linuxExecStepConfig": "linux_exec_step_config",
            "windowsExecStepConfig": "windows_exec_step_config",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPreStep. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPreStep.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPreStep.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 linux_exec_step_config: Optional['outputs.PatchDeploymentPatchConfigPreStepLinuxExecStepConfig'] = None,
                 windows_exec_step_config: Optional['outputs.PatchDeploymentPatchConfigPreStepWindowsExecStepConfig'] = None):
        """
        :param 'PatchDeploymentPatchConfigPreStepLinuxExecStepConfigArgs' linux_exec_step_config: The ExecStepConfig for all Linux VMs targeted by the PatchJob.
               Structure is documented below.
        :param 'PatchDeploymentPatchConfigPreStepWindowsExecStepConfigArgs' windows_exec_step_config: The ExecStepConfig for all Windows VMs targeted by the PatchJob.
               Structure is documented below.
        """
        # Store only the per-OS step configs the caller actually provided.
        for prop_name, prop_value in (
            ("linux_exec_step_config", linux_exec_step_config),
            ("windows_exec_step_config", windows_exec_step_config),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="linuxExecStepConfig")
    def linux_exec_step_config(self) -> Optional['outputs.PatchDeploymentPatchConfigPreStepLinuxExecStepConfig']:
        """
        The ExecStepConfig for all Linux VMs targeted by the PatchJob.
        Structure is documented below.
        """
        return pulumi.get(self, "linux_exec_step_config")

    @property
    @pulumi.getter(name="windowsExecStepConfig")
    def windows_exec_step_config(self) -> Optional['outputs.PatchDeploymentPatchConfigPreStepWindowsExecStepConfig']:
        """
        The ExecStepConfig for all Windows VMs targeted by the PatchJob.
        Structure is documented below.
        """
        return pulumi.get(self, "windows_exec_step_config")
@pulumi.output_type
class PatchDeploymentPatchConfigPreStepLinuxExecStepConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys onto their snake_case property getters
        # and warn when a caller uses the wire form directly.
        renamed = {
            "allowedSuccessCodes": "allowed_success_codes",
            "gcsObject": "gcs_object",
            "localPath": "local_path",
        }.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPreStepLinuxExecStepConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPreStepLinuxExecStepConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPreStepLinuxExecStepConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_success_codes: Optional[Sequence[int]] = None,
                 gcs_object: Optional['outputs.PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject'] = None,
                 interpreter: Optional[str] = None,
                 local_path: Optional[str] = None):
        """
        :param Sequence[int] allowed_success_codes: Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        :param 'PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObjectArgs' gcs_object: A Cloud Storage object containing the executable.
               Structure is documented below.
        :param str interpreter: The script interpreter to use to run the script. If no interpreter is specified the script will
               be executed directly, which will likely only succeed for scripts with shebang lines.
               Possible values are `SHELL` and `POWERSHELL`.
        :param str local_path: An absolute path to the executable on the VM.
        """
        # Store only the fields the caller actually provided.
        for prop_name, prop_value in (
            ("allowed_success_codes", allowed_success_codes),
            ("gcs_object", gcs_object),
            ("interpreter", interpreter),
            ("local_path", local_path),
        ):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="allowedSuccessCodes")
    def allowed_success_codes(self) -> Optional[Sequence[int]]:
        """
        Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        """
        return pulumi.get(self, "allowed_success_codes")

    @property
    @pulumi.getter(name="gcsObject")
    def gcs_object(self) -> Optional['outputs.PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject']:
        """
        A Cloud Storage object containing the executable.
        Structure is documented below.
        """
        return pulumi.get(self, "gcs_object")

    @property
    @pulumi.getter
    def interpreter(self) -> Optional[str]:
        """
        The script interpreter to use to run the script. If no interpreter is specified the script will
        be executed directly, which will likely only succeed for scripts with shebang lines.
        Possible values are `SHELL` and `POWERSHELL`.
        """
        return pulumi.get(self, "interpreter")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """
        An absolute path to the executable on the VM.
        """
        return pulumi.get(self, "local_path")
@pulumi.output_type
class PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the
        # snake_case property getter.
        renamed = {"generationNumber": "generation_number"}.get(key)
        if renamed:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PatchDeploymentPatchConfigPreStepLinuxExecStepConfigGcsObject.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 bucket: str,
                 generation_number: str,
                 object: str):
        """
        :param str bucket: Bucket of the Cloud Storage object.
        :param str generation_number: Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        :param str object: Name of the Cloud Storage object.
        """
        # All three fields are required, so they are stored unconditionally.
        for prop_name, prop_value in (
            ("bucket", bucket),
            ("generation_number", generation_number),
            ("object", object),
        ):
            pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Bucket of the Cloud Storage object.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="generationNumber")
    def generation_number(self) -> str:
        """
        Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        """
        return pulumi.get(self, "generation_number")

    @property
    @pulumi.getter
    def object(self) -> str:
        """
        Name of the Cloud Storage object.
        """
        return pulumi.get(self, "object")
@pulumi.output_type
class PatchDeploymentPatchConfigPreStepWindowsExecStepConfig(dict):
    """Configuration of the exec step to run on Windows VMs before patching."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "allowedSuccessCodes": "allowed_success_codes",
            "gcsObject": "gcs_object",
            "localPath": "local_path",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPreStepWindowsExecStepConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPreStepWindowsExecStepConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentPatchConfigPreStepWindowsExecStepConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_success_codes: Optional[Sequence[int]] = None,
                 gcs_object: Optional['outputs.PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject'] = None,
                 interpreter: Optional[str] = None,
                 local_path: Optional[str] = None):
        """
        :param Sequence[int] allowed_success_codes: Defaults to [0]. A list of possible return values that the execution can return to indicate a success.
        :param 'PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObjectArgs' gcs_object: A Cloud Storage object containing the executable.
               Structure is documented below.
        :param str interpreter: The script interpreter to use to run the script. If no interpreter is specified the script will
               be executed directly, which will likely only succeed for scripts with shebang lines.
               Possible values are `SHELL` and `POWERSHELL`.
        :param str local_path: An absolute path to the executable on the VM.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("allowed_success_codes", allowed_success_codes),
                            ("gcs_object", gcs_object),
                            ("interpreter", interpreter),
                            ("local_path", local_path)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="allowedSuccessCodes")
    def allowed_success_codes(self) -> Optional[Sequence[int]]:
        """Defaults to [0]. A list of possible return values that the execution can return to indicate a success."""
        return pulumi.get(self, "allowed_success_codes")

    @property
    @pulumi.getter(name="gcsObject")
    def gcs_object(self) -> Optional['outputs.PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject']:
        """
        A Cloud Storage object containing the executable.
        Structure is documented below.
        """
        return pulumi.get(self, "gcs_object")

    @property
    @pulumi.getter
    def interpreter(self) -> Optional[str]:
        """
        The script interpreter to use to run the script. If no interpreter is specified the script will
        be executed directly, which will likely only succeed for scripts with shebang lines.
        Possible values are `SHELL` and `POWERSHELL`.
        """
        return pulumi.get(self, "interpreter")

    @property
    @pulumi.getter(name="localPath")
    def local_path(self) -> Optional[str]:
        """An absolute path to the executable on the VM."""
        return pulumi.get(self, "local_path")
@pulumi.output_type
class PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject(dict):
    """Cloud Storage object containing the executable for a Windows pre-patch step."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "generationNumber": "generation_number",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentPatchConfigPreStepWindowsExecStepConfigGcsObject.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 bucket: str,
                 generation_number: str,
                 object: str):
        """
        :param str bucket: Bucket of the Cloud Storage object.
        :param str generation_number: Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change.
        :param str object: Name of the Cloud Storage object.
        """
        for name, value in (("bucket", bucket),
                            ("generation_number", generation_number),
                            ("object", object)):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """Bucket of the Cloud Storage object."""
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="generationNumber")
    def generation_number(self) -> str:
        """Generation number of the Cloud Storage object. This is used to ensure that the ExecStep specified by this PatchJob does not change."""
        return pulumi.get(self, "generation_number")

    @property
    @pulumi.getter
    def object(self) -> str:
        """Name of the Cloud Storage object."""
        return pulumi.get(self, "object")
@pulumi.output_type
class PatchDeploymentPatchConfigWindowsUpdate(dict):
    """Windows Update settings applied by the patch deployment."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "exclusivePatches": "exclusive_patches",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigWindowsUpdate. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigWindowsUpdate.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentPatchConfigWindowsUpdate.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 classifications: Optional[Sequence[str]] = None,
                 excludes: Optional[Sequence[str]] = None,
                 exclusive_patches: Optional[Sequence[str]] = None):
        """
        :param Sequence[str] classifications: Only apply updates of these windows update classifications. If empty, all updates are applied.
               Each value may be one of `CRITICAL`, `SECURITY`, `DEFINITION`, `DRIVER`, `FEATURE_PACK`, `SERVICE_PACK`, `TOOL`, `UPDATE_ROLLUP`, and `UPDATE`.
        :param Sequence[str] excludes: List of KBs to exclude from update.
        :param Sequence[str] exclusive_patches: An exclusive list of kbs to be updated. These are the only patches that will be updated.
               This field must not be used with other patch configurations.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("classifications", classifications),
                            ("excludes", excludes),
                            ("exclusive_patches", exclusive_patches)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def classifications(self) -> Optional[Sequence[str]]:
        """
        Only apply updates of these windows update classifications. If empty, all updates are applied.
        Each value may be one of `CRITICAL`, `SECURITY`, `DEFINITION`, `DRIVER`, `FEATURE_PACK`, `SERVICE_PACK`, `TOOL`, `UPDATE_ROLLUP`, and `UPDATE`.
        """
        return pulumi.get(self, "classifications")

    @property
    @pulumi.getter
    def excludes(self) -> Optional[Sequence[str]]:
        """List of KBs to exclude from update."""
        return pulumi.get(self, "excludes")

    @property
    @pulumi.getter(name="exclusivePatches")
    def exclusive_patches(self) -> Optional[Sequence[str]]:
        """
        An exclusive list of kbs to be updated. These are the only patches that will be updated.
        This field must not be used with other patch configurations.
        """
        return pulumi.get(self, "exclusive_patches")
@pulumi.output_type
class PatchDeploymentPatchConfigYum(dict):
    """Yum update settings applied by the patch deployment."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "exclusivePackages": "exclusive_packages",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigYum. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigYum.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentPatchConfigYum.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 excludes: Optional[Sequence[str]] = None,
                 exclusive_packages: Optional[Sequence[str]] = None,
                 minimal: Optional[bool] = None,
                 security: Optional[bool] = None):
        """
        :param Sequence[str] excludes: List of KBs to exclude from update.
        :param Sequence[str] exclusive_packages: An exclusive list of packages to be updated. These are the only packages that will be updated.
               If these packages are not installed, they will be ignored. This field cannot be specified with
               any other patch configuration fields.
        :param bool minimal: Will cause patch to run yum update-minimal instead.
        :param bool security: Adds the --security flag to yum update. Not supported on all platforms.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("excludes", excludes),
                            ("exclusive_packages", exclusive_packages),
                            ("minimal", minimal),
                            ("security", security)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def excludes(self) -> Optional[Sequence[str]]:
        """List of KBs to exclude from update."""
        return pulumi.get(self, "excludes")

    @property
    @pulumi.getter(name="exclusivePackages")
    def exclusive_packages(self) -> Optional[Sequence[str]]:
        """
        An exclusive list of packages to be updated. These are the only packages that will be updated.
        If these packages are not installed, they will be ignored. This field cannot be specified with
        any other patch configuration fields.
        """
        return pulumi.get(self, "exclusive_packages")

    @property
    @pulumi.getter
    def minimal(self) -> Optional[bool]:
        """Will cause patch to run yum update-minimal instead."""
        return pulumi.get(self, "minimal")

    @property
    @pulumi.getter
    def security(self) -> Optional[bool]:
        """Adds the --security flag to yum update. Not supported on all platforms."""
        return pulumi.get(self, "security")
@pulumi.output_type
class PatchDeploymentPatchConfigZypper(dict):
    """Zypper patch settings applied by the patch deployment."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "exclusivePatches": "exclusive_patches",
            "withOptional": "with_optional",
            "withUpdate": "with_update",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentPatchConfigZypper. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentPatchConfigZypper.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentPatchConfigZypper.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 categories: Optional[Sequence[str]] = None,
                 excludes: Optional[Sequence[str]] = None,
                 exclusive_patches: Optional[Sequence[str]] = None,
                 severities: Optional[Sequence[str]] = None,
                 with_optional: Optional[bool] = None,
                 with_update: Optional[bool] = None):
        """
        :param Sequence[str] categories: Install only patches with these categories. Common categories include security, recommended, and feature.
        :param Sequence[str] excludes: List of KBs to exclude from update.
        :param Sequence[str] exclusive_patches: An exclusive list of kbs to be updated. These are the only patches that will be updated.
               This field must not be used with other patch configurations.
        :param Sequence[str] severities: Install only patches with these severities. Common severities include critical, important, moderate, and low.
        :param bool with_optional: Adds the --with-optional flag to zypper patch.
        :param bool with_update: Adds the --with-update flag, to zypper patch.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("categories", categories),
                            ("excludes", excludes),
                            ("exclusive_patches", exclusive_patches),
                            ("severities", severities),
                            ("with_optional", with_optional),
                            ("with_update", with_update)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def categories(self) -> Optional[Sequence[str]]:
        """Install only patches with these categories. Common categories include security, recommended, and feature."""
        return pulumi.get(self, "categories")

    @property
    @pulumi.getter
    def excludes(self) -> Optional[Sequence[str]]:
        """List of KBs to exclude from update."""
        return pulumi.get(self, "excludes")

    @property
    @pulumi.getter(name="exclusivePatches")
    def exclusive_patches(self) -> Optional[Sequence[str]]:
        """
        An exclusive list of kbs to be updated. These are the only patches that will be updated.
        This field must not be used with other patch configurations.
        """
        return pulumi.get(self, "exclusive_patches")

    @property
    @pulumi.getter
    def severities(self) -> Optional[Sequence[str]]:
        """Install only patches with these severities. Common severities include critical, important, moderate, and low."""
        return pulumi.get(self, "severities")

    @property
    @pulumi.getter(name="withOptional")
    def with_optional(self) -> Optional[bool]:
        """Adds the --with-optional flag to zypper patch."""
        return pulumi.get(self, "with_optional")

    @property
    @pulumi.getter(name="withUpdate")
    def with_update(self) -> Optional[bool]:
        """Adds the --with-update flag, to zypper patch."""
        return pulumi.get(self, "with_update")
@pulumi.output_type
class PatchDeploymentRecurringSchedule(dict):
    """Recurring schedule (time of day, time zone, monthly/weekly cadence) for a patch deployment."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "timeOfDay": "time_of_day",
            "timeZone": "time_zone",
            "endTime": "end_time",
            "lastExecuteTime": "last_execute_time",
            "nextExecuteTime": "next_execute_time",
            "startTime": "start_time",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentRecurringSchedule. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentRecurringSchedule.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentRecurringSchedule.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 time_of_day: 'outputs.PatchDeploymentRecurringScheduleTimeOfDay',
                 time_zone: 'outputs.PatchDeploymentRecurringScheduleTimeZone',
                 end_time: Optional[str] = None,
                 last_execute_time: Optional[str] = None,
                 monthly: Optional['outputs.PatchDeploymentRecurringScheduleMonthly'] = None,
                 next_execute_time: Optional[str] = None,
                 start_time: Optional[str] = None,
                 weekly: Optional['outputs.PatchDeploymentRecurringScheduleWeekly'] = None):
        """
        :param 'PatchDeploymentRecurringScheduleTimeOfDayArgs' time_of_day: Time of the day to run a recurring deployment.
               Structure is documented below.
        :param 'PatchDeploymentRecurringScheduleTimeZoneArgs' time_zone: Defines the time zone that timeOfDay is relative to. The rules for daylight saving time are
               determined by the chosen time zone.
               Structure is documented below.
        :param str end_time: The end time at which a recurring patch deployment schedule is no longer active.
               A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        :param str last_execute_time: -
               The time the last patch job ran successfully.
               A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        :param 'PatchDeploymentRecurringScheduleMonthlyArgs' monthly: Schedule with monthly executions.
               Structure is documented below.
        :param str next_execute_time: -
               The time the next patch job is scheduled to run.
               A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        :param str start_time: The time that the recurring schedule becomes effective. Defaults to createTime of the patch deployment.
               A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        :param 'PatchDeploymentRecurringScheduleWeeklyArgs' weekly: Schedule with weekly executions.
               Structure is documented below.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "time_of_day", time_of_day)
        pulumi.set(__self__, "time_zone", time_zone)
        # Optional fields are stored only when supplied.
        for name, value in (("end_time", end_time),
                            ("last_execute_time", last_execute_time),
                            ("monthly", monthly),
                            ("next_execute_time", next_execute_time),
                            ("start_time", start_time),
                            ("weekly", weekly)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="timeOfDay")
    def time_of_day(self) -> 'outputs.PatchDeploymentRecurringScheduleTimeOfDay':
        """
        Time of the day to run a recurring deployment.
        Structure is documented below.
        """
        return pulumi.get(self, "time_of_day")

    @property
    @pulumi.getter(name="timeZone")
    def time_zone(self) -> 'outputs.PatchDeploymentRecurringScheduleTimeZone':
        """
        Defines the time zone that timeOfDay is relative to. The rules for daylight saving time are
        determined by the chosen time zone.
        Structure is documented below.
        """
        return pulumi.get(self, "time_zone")

    @property
    @pulumi.getter(name="endTime")
    def end_time(self) -> Optional[str]:
        """
        The end time at which a recurring patch deployment schedule is no longer active.
        A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "end_time")

    @property
    @pulumi.getter(name="lastExecuteTime")
    def last_execute_time(self) -> Optional[str]:
        """
        -
        The time the last patch job ran successfully.
        A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "last_execute_time")

    @property
    @pulumi.getter
    def monthly(self) -> Optional['outputs.PatchDeploymentRecurringScheduleMonthly']:
        """
        Schedule with monthly executions.
        Structure is documented below.
        """
        return pulumi.get(self, "monthly")

    @property
    @pulumi.getter(name="nextExecuteTime")
    def next_execute_time(self) -> Optional[str]:
        """
        -
        The time the next patch job is scheduled to run.
        A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "next_execute_time")

    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> Optional[str]:
        """
        The time that the recurring schedule becomes effective. Defaults to createTime of the patch deployment.
        A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "start_time")

    @property
    @pulumi.getter
    def weekly(self) -> Optional['outputs.PatchDeploymentRecurringScheduleWeekly']:
        """
        Schedule with weekly executions.
        Structure is documented below.
        """
        return pulumi.get(self, "weekly")
@pulumi.output_type
class PatchDeploymentRecurringScheduleMonthly(dict):
    """Monthly execution schedule: either a fixed day-of-month or a week-day-of-month."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "monthDay": "month_day",
            "weekDayOfMonth": "week_day_of_month",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentRecurringScheduleMonthly. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentRecurringScheduleMonthly.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentRecurringScheduleMonthly.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 month_day: Optional[int] = None,
                 week_day_of_month: Optional['outputs.PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth'] = None):
        """
        :param int month_day: One day of the month. 1-31 indicates the 1st to the 31st day. -1 indicates the last day of the month.
               Months without the target day will be skipped. For example, a schedule to run "every month on the 31st"
               will not run in February, April, June, etc.
        :param 'PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonthArgs' week_day_of_month: Week day in a month.
               Structure is documented below.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("month_day", month_day),
                            ("week_day_of_month", week_day_of_month)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="monthDay")
    def month_day(self) -> Optional[int]:
        """
        One day of the month. 1-31 indicates the 1st to the 31st day. -1 indicates the last day of the month.
        Months without the target day will be skipped. For example, a schedule to run "every month on the 31st"
        will not run in February, April, June, etc.
        """
        return pulumi.get(self, "month_day")

    @property
    @pulumi.getter(name="weekDayOfMonth")
    def week_day_of_month(self) -> Optional['outputs.PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth']:
        """
        Week day in a month.
        Structure is documented below.
        """
        return pulumi.get(self, "week_day_of_month")
@pulumi.output_type
class PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth(dict):
    """A week day within a month, e.g. "the 2nd TUESDAY"."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "dayOfWeek": "day_of_week",
            "weekOrdinal": "week_ordinal",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentRecurringScheduleMonthlyWeekDayOfMonth.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 day_of_week: str,
                 week_ordinal: int):
        """
        :param str day_of_week: A day of the week.
               Possible values are `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
        :param int week_ordinal: Week number in a month. 1-4 indicates the 1st to 4th week of the month. -1 indicates the last week of the month.
        """
        for name, value in (("day_of_week", day_of_week),
                            ("week_ordinal", week_ordinal)):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="dayOfWeek")
    def day_of_week(self) -> str:
        """
        A day of the week.
        Possible values are `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
        """
        return pulumi.get(self, "day_of_week")

    @property
    @pulumi.getter(name="weekOrdinal")
    def week_ordinal(self) -> int:
        """Week number in a month. 1-4 indicates the 1st to 4th week of the month. -1 indicates the last week of the month."""
        return pulumi.get(self, "week_ordinal")
@pulumi.output_type
class PatchDeploymentRecurringScheduleTimeOfDay(dict):
    """Wall-clock time of day (hours/minutes/seconds/nanos) for a recurring deployment."""

    def __init__(__self__, *,
                 hours: Optional[int] = None,
                 minutes: Optional[int] = None,
                 nanos: Optional[int] = None,
                 seconds: Optional[int] = None):
        """
        :param int hours: Hours of day in 24 hour format. Should be from 0 to 23.
               An API may choose to allow the value "24:00:00" for scenarios like business closing time.
        :param int minutes: Minutes of hour of day. Must be from 0 to 59.
        :param int nanos: Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
        :param int seconds: Seconds of minutes of the time. Must normally be from 0 to 59. An API may allow the value 60 if it allows leap-seconds.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("hours", hours),
                            ("minutes", minutes),
                            ("nanos", nanos),
                            ("seconds", seconds)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def hours(self) -> Optional[int]:
        """
        Hours of day in 24 hour format. Should be from 0 to 23.
        An API may choose to allow the value "24:00:00" for scenarios like business closing time.
        """
        return pulumi.get(self, "hours")

    @property
    @pulumi.getter
    def minutes(self) -> Optional[int]:
        """Minutes of hour of day. Must be from 0 to 59."""
        return pulumi.get(self, "minutes")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[int]:
        """Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999."""
        return pulumi.get(self, "nanos")

    @property
    @pulumi.getter
    def seconds(self) -> Optional[int]:
        """Seconds of minutes of the time. Must normally be from 0 to 59. An API may allow the value 60 if it allows leap-seconds."""
        return pulumi.get(self, "seconds")
@pulumi.output_type
class PatchDeploymentRecurringScheduleTimeZone(dict):
    """IANA time zone (id plus optional database version) for the recurring schedule."""

    def __init__(__self__, *,
                 id: str,
                 version: Optional[str] = None):
        """
        :param str id: IANA Time Zone Database time zone, e.g. "America/New_York".
        :param str version: IANA Time Zone Database version number, e.g. "2019a".
        """
        pulumi.set(__self__, "id", id)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter
    def id(self) -> str:
        """IANA Time Zone Database time zone, e.g. "America/New_York"."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        """IANA Time Zone Database version number, e.g. "2019a"."""
        return pulumi.get(self, "version")
@pulumi.output_type
class PatchDeploymentRecurringScheduleWeekly(dict):
    """Weekly execution schedule keyed on a single day of the week."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "dayOfWeek": "day_of_week",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentRecurringScheduleWeekly. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentRecurringScheduleWeekly.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentRecurringScheduleWeekly.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 day_of_week: str):
        """
        :param str day_of_week: A day of the week.
               Possible values are `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
        """
        pulumi.set(__self__, "day_of_week", day_of_week)

    @property
    @pulumi.getter(name="dayOfWeek")
    def day_of_week(self) -> str:
        """
        A day of the week.
        Possible values are `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
        """
        return pulumi.get(self, "day_of_week")
@pulumi.output_type
class PatchDeploymentRollout(dict):
    """Rollout strategy (mode plus per-zone disruption budget) for the patch deployment."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = {
            "disruptionBudget": "disruption_budget",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PatchDeploymentRollout. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PatchDeploymentRollout.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        PatchDeploymentRollout.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 disruption_budget: 'outputs.PatchDeploymentRolloutDisruptionBudget',
                 mode: str):
        """
        :param 'PatchDeploymentRolloutDisruptionBudgetArgs' disruption_budget: The maximum number (or percentage) of VMs per zone to disrupt at any given moment. The number of VMs calculated from multiplying the percentage by the total number of VMs in a zone is rounded up.
               During patching, a VM is considered disrupted from the time the agent is notified to begin until patching has completed. This disruption time includes the time to complete reboot and any post-patch steps.
               A VM contributes to the disruption budget if its patching operation fails either when applying the patches, running pre or post patch steps, or if it fails to respond with a success notification before timing out. VMs that are not running or do not have an active agent do not count toward this disruption budget.
               For zone-by-zone rollouts, if the disruption budget in a zone is exceeded, the patch job stops, because continuing to the next zone requires completion of the patch process in the previous zone.
               For example, if the disruption budget has a fixed value of 10, and 8 VMs fail to patch in the current zone, the patch job continues to patch 2 VMs at a time until the zone is completed. When that zone is completed successfully, patching begins with 10 VMs at a time in the next zone. If 10 VMs in the next zone fail to patch, the patch job stops.
               Structure is documented below.
        :param str mode: Mode of the patch rollout.
               Possible values are `ZONE_BY_ZONE` and `CONCURRENT_ZONES`.
        """
        for name, value in (("disruption_budget", disruption_budget),
                            ("mode", mode)):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="disruptionBudget")
    def disruption_budget(self) -> 'outputs.PatchDeploymentRolloutDisruptionBudget':
        """
        The maximum number (or percentage) of VMs per zone to disrupt at any given moment. The number of VMs calculated from multiplying the percentage by the total number of VMs in a zone is rounded up.
        During patching, a VM is considered disrupted from the time the agent is notified to begin until patching has completed. This disruption time includes the time to complete reboot and any post-patch steps.
        A VM contributes to the disruption budget if its patching operation fails either when applying the patches, running pre or post patch steps, or if it fails to respond with a success notification before timing out. VMs that are not running or do not have an active agent do not count toward this disruption budget.
        For zone-by-zone rollouts, if the disruption budget in a zone is exceeded, the patch job stops, because continuing to the next zone requires completion of the patch process in the previous zone.
        For example, if the disruption budget has a fixed value of 10, and 8 VMs fail to patch in the current zone, the patch job continues to patch 2 VMs at a time until the zone is completed. When that zone is completed successfully, patching begins with 10 VMs at a time in the next zone. If 10 VMs in the next zone fail to patch, the patch job stops.
        Structure is documented below.
        """
        return pulumi.get(self, "disruption_budget")

    @property
    @pulumi.getter
    def mode(self) -> str:
        """
        Mode of the patch rollout.
        Possible values are `ZONE_BY_ZONE` and `CONCURRENT_ZONES`.
        """
        return pulumi.get(self, "mode")
@pulumi.output_type
class PatchDeploymentRolloutDisruptionBudget(dict):
    """Disruption budget expressed either as a fixed count or as a percentage."""

    def __init__(__self__, *,
                 fixed: Optional[int] = None,
                 percentage: Optional[int] = None):
        """
        :param int fixed: Specifies a fixed value.
        :param int percentage: Specifies the relative value defined as a percentage, which will be multiplied by a reference value.
        """
        # Only record the fields that were actually supplied.
        for name, value in (("fixed", fixed), ("percentage", percentage)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def fixed(self) -> Optional[int]:
        """Specifies a fixed value."""
        return pulumi.get(self, "fixed")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[int]:
        """Specifies the relative value defined as a percentage, which will be multiplied by a reference value."""
        return pulumi.get(self, "percentage")
| 41.955236
| 621
| 0.659582
| 28,306
| 262,430
| 5.93973
| 0.03501
| 0.016362
| 0.024665
| 0.036049
| 0.791453
| 0.778921
| 0.769107
| 0.721674
| 0.714138
| 0.702908
| 0
| 0.004228
| 0.25281
| 262,430
| 6,254
| 622
| 41.961944
| 0.853207
| 0.313901
| 0
| 0.778083
| 1
| 0.020324
| 0.214658
| 0.113303
| 0
| 0
| 0
| 0
| 0
| 1
| 0.176325
| false
| 0
| 0.001648
| 0
| 0.333974
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a3865fc0878a8aede115290b2e1cfd221c77b62
| 14
|
py
|
Python
|
test/integration/StringSubstringLength/start end numerics.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 30
|
2019-10-29T12:47:50.000Z
|
2022-02-12T06:41:39.000Z
|
test/integration/StringSubstringLength/start end numerics.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 247
|
2017-09-21T17:11:18.000Z
|
2019-10-08T12:59:07.000Z
|
test/integration/StringSubstringLength/start end numerics.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 17
|
2017-10-01T16:53:20.000Z
|
2018-11-28T07:20:35.000Z
|
#
abc[3:10]
#
| 3.5
| 9
| 0.428571
| 3
| 14
| 2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.214286
| 14
| 3
| 10
| 4.666667
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a45aa6be15c1c5dd0cfec3e73c3fbe7a92cd484
| 62,038
|
py
|
Python
|
nerodia/container.py
|
mark0203/nerodia
|
2631c210fbaa0a7b5d598016e11ad7c7af083237
|
[
"MIT"
] | null | null | null |
nerodia/container.py
|
mark0203/nerodia
|
2631c210fbaa0a7b5d598016e11ad7c7af083237
|
[
"MIT"
] | null | null | null |
nerodia/container.py
|
mark0203/nerodia
|
2631c210fbaa0a7b5d598016e11ad7c7af083237
|
[
"MIT"
] | null | null | null |
from re import compile
import nerodia
class Container(object):
def element(self, *args, **kwargs):
    """Locate a single generic HTML element matching the given selector."""
    from .elements.html_elements import HTMLElement
    selector = self._extract_selector(*args, **kwargs)
    return HTMLElement(self, selector)
def elements(self, *args, **kwargs):
    """Locate a collection of generic HTML elements matching the selector."""
    from .elements.html_elements import HTMLElementCollection
    selector = self._extract_selector(*args, **kwargs)
    return HTMLElementCollection(self, selector)
@staticmethod
def _extract_selector(*args, **kwargs):
if len(args) == 2:
nerodia.logger.deprecate('Using ordered parameters to locate elements '
'({}, {})'.format(*args), '{{{}={}}}'.format(*args),
ids='selector_parameters')
selector = {args[0]: args[1]}
elif len(args) == 1 and isinstance(args[0], dict):
kwargs.update(args[0])
selector = kwargs
elif not args:
selector = kwargs
else:
raise ValueError('expected kwargs dict, got {}'.format(kwargs))
for k, v in selector.copy().items():
if isinstance(v, str) and v.startswith(r'/') and v.endswith(r'/'):
selector[k] = compile(v[1:-1])
return selector
# 'a' has no usable plural as a method name, so anchors are exposed as link/links
def link(self, *args, **kwargs):
    """Locate a single anchor (<a>) element."""
    from .elements.link import Anchor
    selector = self._extract_selector(*args, **kwargs)
    return Anchor(self, dict(selector, tag_name='a'))
def links(self, *args, **kwargs):
    """Locate a collection of anchor (<a>) elements."""
    from .elements.html_elements import AnchorCollection
    selector = self._extract_selector(*args, **kwargs)
    return AnchorCollection(self, dict(selector, tag_name='a'))
def abbr(self, *args, **kwargs):
    """Locate a single <abbr> element."""
    from .elements.html_elements import HTMLElement
    selector = dict(self._extract_selector(*args, **kwargs), tag_name='abbr')
    return HTMLElement(self, selector)
def abbrs(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='abbr'))
def address(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='address'))
def addresses(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='address'))
def area(self, *args, **kwargs):
from .elements.area import Area
return Area(self, dict(self._extract_selector(*args, **kwargs), tag_name='area'))
def areas(self, *args, **kwargs):
from .elements.html_elements import AreaCollection
return AreaCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='area'))
def article(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='article'))
def articles(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='article'))
def aside(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='aside'))
def asides(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='aside'))
def audio(self, *args, **kwargs):
from .elements.html_elements import Audio
return Audio(self, dict(self._extract_selector(*args, **kwargs), tag_name='audio'))
def audios(self, *args, **kwargs):
from .elements.html_elements import AudioCollection
return AudioCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='audio'))
def b(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='b'))
def bs(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='b'))
def base(self, *args, **kwargs):
from .elements.html_elements import Base
return Base(self, dict(self._extract_selector(*args, **kwargs), tag_name='base'))
def bases(self, *args, **kwargs):
from .elements.html_elements import BaseCollection
return BaseCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='base'))
def bdi(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='bdi'))
def bdis(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='bdi'))
def bdo(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='bdo'))
def bdos(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='bdo'))
def blockquote(self, *args, **kwargs):
from .elements.html_elements import Quote
return Quote(self, dict(self._extract_selector(*args, **kwargs), tag_name='blockquote'))
def blockquotes(self, *args, **kwargs):
from .elements.html_elements import QuoteCollection
return QuoteCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='blockquote'))
def body(self, *args, **kwargs):
from .elements.html_elements import Body
return Body(self, dict(self._extract_selector(*args, **kwargs), tag_name='body'))
def bodys(self, *args, **kwargs):
from .elements.html_elements import BodyCollection
return BodyCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='body'))
def br(self, *args, **kwargs):
from .elements.html_elements import BR
return BR(self, dict(self._extract_selector(*args, **kwargs), tag_name='br'))
def brs(self, *args, **kwargs):
from .elements.html_elements import BRCollection
return BRCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='br'))
def button(self, *args, **kwargs):
from .elements.button import Button
return Button(self, dict(self._extract_selector(*args, **kwargs), tag_name='button'))
def buttons(self, *args, **kwargs):
from .elements.html_elements import ButtonCollection
return ButtonCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='button'))
def canvas(self, *args, **kwargs):
from .elements.html_elements import Canvas
return Canvas(self, dict(self._extract_selector(*args, **kwargs), tag_name='canvas'))
def canvases(self, *args, **kwargs):
from .elements.html_elements import CanvasCollection
return CanvasCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='canvas'))
def caption(self, *args, **kwargs):
from .elements.html_elements import TableCaption
return TableCaption(self, dict(self._extract_selector(*args, **kwargs),
tag_name='caption'))
def captions(self, *args, **kwargs):
from .elements.html_elements import TableCaptionCollection
return TableCaptionCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='caption'))
def checkbox(self, *args, **kwargs):
    """Locate a single checkbox (<input type="checkbox">) element."""
    from .elements.check_box import CheckBox
    selector = self._extract_selector(*args, **kwargs)
    return CheckBox(self, dict(selector, tag_name='input', type='checkbox'))
def checkboxes(self, *args, **kwargs):
    """Locate a collection of checkbox (<input type="checkbox">) elements."""
    from .elements.check_box import CheckBoxCollection
    selector = self._extract_selector(*args, **kwargs)
    return CheckBoxCollection(self, dict(selector, tag_name='input', type='checkbox'))
def cite(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='cite'))
def cites(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='cite'))
def code(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='code'))
def codes(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='code'))
def col(self, *args, **kwargs):
from .elements.html_elements import TableCol
return TableCol(self, dict(self._extract_selector(*args, **kwargs), tag_name='col'))
def cols(self, *args, **kwargs):
from .elements.html_elements import TableColCollection
return TableColCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='col'))
def colgroup(self, *args, **kwargs):
from .elements.html_elements import TableCol
return TableCol(self, dict(self._extract_selector(*args, **kwargs), tag_name='colgroup'))
def colgroups(self, *args, **kwargs):
from .elements.html_elements import TableColCollection
return TableColCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='colgroup'))
def data(self, *args, **kwargs):
from .elements.html_elements import Data
return Data(self, dict(self._extract_selector(*args, **kwargs), tag_name='data'))
def datas(self, *args, **kwargs):
from .elements.html_elements import DataCollection
return DataCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='data'))
def datalist(self, *args, **kwargs):
from .elements.html_elements import DataList
return DataList(self, dict(self._extract_selector(*args, **kwargs), tag_name='datalist'))
def datalists(self, *args, **kwargs):
from .elements.html_elements import DataListCollection
return DataListCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='datalist'))
def date_field(self, *args, **kwargs):
from .elements.date_field import DateField
return DateField(self, dict(self._extract_selector(*args, **kwargs), tag_name='input',
type='date'))
def date_fields(self, *args, **kwargs):
from .elements.date_field import DateFieldCollection
return DateFieldCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input', type='date'))
def date_time_field(self, *args, **kwargs):
from .elements.date_time_field import DateTimeField
return DateTimeField(self, dict(self._extract_selector(*args, **kwargs), tag_name='input',
type='datetime-local'))
def date_time_fields(self, *args, **kwargs):
from .elements.date_time_field import DateTimeFieldCollection
return DateTimeFieldCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input', type='datetime-local'))
def dd(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='dd'))
def dds(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='dd'))
# 'del' is a Python keyword and cannot be a method name; exposed as delete/deletes
def delete(self, *args, **kwargs):
    """Locate a single <del> element."""
    from .elements.html_elements import Mod
    selector = self._extract_selector(*args, **kwargs)
    return Mod(self, dict(selector, tag_name='del'))
def deletes(self, *args, **kwargs):
    """Locate a collection of <del> elements."""
    from .elements.html_elements import ModCollection
    selector = self._extract_selector(*args, **kwargs)
    return ModCollection(self, dict(selector, tag_name='del'))
def details(self, *args, **kwargs):
from .elements.html_elements import Details
return Details(self, dict(self._extract_selector(*args, **kwargs), tag_name='details'))
def detailses(self, *args, **kwargs):
from .elements.html_elements import DetailsCollection
return DetailsCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='details'))
def dfn(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='dfn'))
def dfns(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='dfn'))
def dialog(self, *args, **kwargs):
from .elements.html_elements import Dialog
return Dialog(self, dict(self._extract_selector(*args, **kwargs), tag_name='dialog'))
def dialogs(self, *args, **kwargs):
from .elements.html_elements import DialogCollection
return DialogCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='dialog'))
def div(self, *args, **kwargs):
from .elements.html_elements import Div
return Div(self, dict(self._extract_selector(*args, **kwargs), tag_name='div'))
def divs(self, *args, **kwargs):
from .elements.html_elements import DivCollection
return DivCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='div'))
def dl(self, *args, **kwargs):
from .elements.d_list import DList
return DList(self, dict(self._extract_selector(*args, **kwargs), tag_name='dl'))
def dls(self, *args, **kwargs):
from .elements.html_elements import DListCollection
return DListCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='dl'))
def dt(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='dt'))
def dts(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='dt'))
def em(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='em'))
def ems(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='em'))
def embed(self, *args, **kwargs):
from .elements.html_elements import Embed
return Embed(self, dict(self._extract_selector(*args, **kwargs), tag_name='embed'))
def embeds(self, *args, **kwargs):
from .elements.html_elements import EmbedCollection
return EmbedCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='embed'))
def fieldset(self, *args, **kwargs):
from .elements.html_elements import FieldSet
return FieldSet(self, dict(self._extract_selector(*args, **kwargs), tag_name='fieldset'))
field_set = fieldset
def fieldsets(self, *args, **kwargs):
from .elements.html_elements import FieldSetCollection
return FieldSetCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='fieldset'))
field_sets = fieldsets
def figcaption(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs),
tag_name='figcaption'))
def figcaptions(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='figcaption'))
def figure(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='figure'))
def figures(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='figure'))
def file_field(self, *args, **kwargs):
from .elements.file_field import FileField
return FileField(self, dict(self._extract_selector(*args, **kwargs), tag_name='input',
type='file'))
def file_fields(self, *args, **kwargs):
from .elements.file_field import FileFieldCollection
return FileFieldCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input', type='file'))
def font(self, *args, **kwargs):
from .elements.font import Font
return Font(self, dict(self._extract_selector(*args, **kwargs), tag_name='font'))
def fonts(self, *args, **kwargs):
from .elements.html_elements import FontCollection
return FontCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='font'))
def footer(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='footer'))
def footers(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='footer'))
def form(self, *args, **kwargs):
from .elements.form import Form
return Form(self, dict(self._extract_selector(*args, **kwargs), tag_name='form'))
def forms(self, *args, **kwargs):
from .elements.html_elements import FormCollection
return FormCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='form'))
def frame(self, *args, **kwargs):
from .elements.i_frame import Frame
return Frame(self, dict(self._extract_selector(*args, **kwargs), tag_name='frame'))
def frames(self, *args, **kwargs):
from .elements.i_frame import FrameCollection
return FrameCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='frame'))
def frameset(self, *args, **kwargs):
from .elements.html_elements import FrameSet
return FrameSet(self, dict(self._extract_selector(*args, **kwargs), tag_name='frameset'))
def framesets(self, *args, **kwargs):
from .elements.html_elements import FrameSetCollection
return FrameSetCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='frameset'))
def h1(self, *args, **kwargs):
from .elements.html_elements import Heading
return Heading(self, dict(self._extract_selector(*args, **kwargs), tag_name='h1'))
def h1s(self, *args, **kwargs):
from .elements.html_elements import HeadingCollection
return HeadingCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='h1'))
def h2(self, *args, **kwargs):
from .elements.html_elements import Heading
return Heading(self, dict(self._extract_selector(*args, **kwargs), tag_name='h2'))
def h2s(self, *args, **kwargs):
from .elements.html_elements import HeadingCollection
return HeadingCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='h2'))
def h3(self, *args, **kwargs):
from .elements.html_elements import Heading
return Heading(self, dict(self._extract_selector(*args, **kwargs), tag_name='h3'))
def h3s(self, *args, **kwargs):
from .elements.html_elements import HeadingCollection
return HeadingCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='h3'))
def h4(self, *args, **kwargs):
from .elements.html_elements import Heading
return Heading(self, dict(self._extract_selector(*args, **kwargs), tag_name='h4'))
def h4s(self, *args, **kwargs):
from .elements.html_elements import HeadingCollection
return HeadingCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='h4'))
def h5(self, *args, **kwargs):
from .elements.html_elements import Heading
return Heading(self, dict(self._extract_selector(*args, **kwargs), tag_name='h5'))
def h5s(self, *args, **kwargs):
from .elements.html_elements import HeadingCollection
return HeadingCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='h5'))
def h6(self, *args, **kwargs):
from .elements.html_elements import Heading
return Heading(self, dict(self._extract_selector(*args, **kwargs), tag_name='h6'))
def h6s(self, *args, **kwargs):
from .elements.html_elements import HeadingCollection
return HeadingCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='h6'))
def head(self, *args, **kwargs):
from .elements.html_elements import Head
return Head(self, dict(self._extract_selector(*args, **kwargs), tag_name='head'))
def heads(self, *args, **kwargs):
from .elements.html_elements import HeadCollection
return HeadCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='head'))
def header(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='header'))
def headers(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='header'))
def hgroup(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='hgroup'))
def hgroups(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='hgroup'))
def hidden(self, *args, **kwargs):
from .elements.hidden import Hidden
return Hidden(self, dict(self._extract_selector(*args, **kwargs), tag_name='input',
type='hidden'))
def hiddens(self, *args, **kwargs):
from .elements.hidden import HiddenCollection
return HiddenCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input', type='hidden'))
def hr(self, *args, **kwargs):
from .elements.html_elements import HR
return HR(self, dict(self._extract_selector(*args, **kwargs), tag_name='hr'))
def hrs(self, *args, **kwargs):
from .elements.html_elements import HRCollection
return HRCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='hr'))
def html(self, *args, **kwargs):
from .elements.html_elements import Html
return Html(self, dict(self._extract_selector(*args, **kwargs), tag_name='html'))
def htmls(self, *args, **kwargs):
from .elements.html_elements import HtmlCollection
return HtmlCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='html'))
# Plural of 'i' cannot be a method name, use ital/itals instead
def ital(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='i'))
def itals(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='i'))
def iframe(self, *args, **kwargs):
from .elements.i_frame import IFrame
return IFrame(self, dict(self._extract_selector(*args, **kwargs), tag_name='iframe'))
def iframes(self, *args, **kwargs):
from .elements.html_elements import IFrameCollection
return IFrameCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='iframe'))
def img(self, *args, **kwargs):
from .elements.image import Image
return Image(self, dict(self._extract_selector(*args, **kwargs), tag_name='img'))
image = img
def imgs(self, *args, **kwargs):
from .elements.html_elements import ImageCollection
return ImageCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='img'))
images = imgs
def input(self, *args, **kwargs):
from .elements.input import Input
return Input(self, dict(self._extract_selector(*args, **kwargs), tag_name='input'))
def inputs(self, *args, **kwargs):
from .elements.html_elements import InputCollection
return InputCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input'))
def ins(self, *args, **kwargs):
from .elements.html_elements import Mod
return Mod(self, dict(self._extract_selector(*args, **kwargs), tag_name='ins'))
def inses(self, *args, **kwargs):
from .elements.html_elements import ModCollection
return ModCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='ins'))
def kbd(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='kbd'))
def kbds(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='kbd'))
def label(self, *args, **kwargs):
from .elements.html_elements import Label
return Label(self, dict(self._extract_selector(*args, **kwargs), tag_name='label'))
def labels(self, *args, **kwargs):
from .elements.html_elements import LabelCollection
return LabelCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='label'))
def legend(self, *args, **kwargs):
from .elements.html_elements import Legend
return Legend(self, dict(self._extract_selector(*args, **kwargs), tag_name='legend'))
def legends(self, *args, **kwargs):
from .elements.html_elements import LegendCollection
return LegendCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='legend'))
def li(self, *args, **kwargs):
from .elements.html_elements import LI
return LI(self, dict(self._extract_selector(*args, **kwargs), tag_name='li'))
def lis(self, *args, **kwargs):
from .elements.html_elements import LICollection
return LICollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='li'))
def main(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='main'))
def mains(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='main'))
def map(self, *args, **kwargs):
from .elements.html_elements import Map
return Map(self, dict(self._extract_selector(*args, **kwargs), tag_name='map'))
def maps(self, *args, **kwargs):
from .elements.html_elements import MapCollection
return MapCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='map'))
def mark(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='mark'))
def marks(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='mark'))
def meta(self, *args, **kwargs):
from .elements.html_elements import Meta
return Meta(self, dict(self._extract_selector(*args, **kwargs), tag_name='meta'))
def metas(self, *args, **kwargs):
from .elements.html_elements import MetaCollection
return MetaCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='meta'))
def meter(self, *args, **kwargs):
from .elements.html_elements import Meter
return Meter(self, dict(self._extract_selector(*args, **kwargs), tag_name='meter'))
def meters(self, *args, **kwargs):
from .elements.html_elements import MeterCollection
return MeterCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='meter'))
def nav(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='nav'))
def navs(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='nav'))
def noscript(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='noscript'))
def noscripts(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='noscript'))
def object(self, *args, **kwargs):
from .elements.html_elements import Object
return Object(self, dict(self._extract_selector(*args, **kwargs), tag_name='object'))
def objects(self, *args, **kwargs):
from .elements.html_elements import ObjectCollection
return ObjectCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='object'))
def ol(self, *args, **kwargs):
from .elements.list import OList
return OList(self, dict(self._extract_selector(*args, **kwargs), tag_name='ol'))
def ols(self, *args, **kwargs):
from .elements.html_elements import OListCollection
return OListCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='ol'))
def optgroup(self, *args, **kwargs):
from .elements.html_elements import OptGroup
return OptGroup(self, dict(self._extract_selector(*args, **kwargs), tag_name='optgroup'))
def optgroups(self, *args, **kwargs):
from .elements.html_elements import OptGroupCollection
return OptGroupCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='optgroup'))
def option(self, *args, **kwargs):
from .elements.option import Option
return Option(self, dict(self._extract_selector(*args, **kwargs), tag_name='option'))
def options(self, *args, **kwargs):
from .elements.html_elements import OptionCollection
return OptionCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='option'))
def output(self, *args, **kwargs):
from .elements.html_elements import Output
return Output(self, dict(self._extract_selector(*args, **kwargs), tag_name='output'))
def outputs(self, *args, **kwargs):
from .elements.html_elements import OutputCollection
return OutputCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='output'))
def p(self, *args, **kwargs):
from .elements.html_elements import Paragraph
return Paragraph(self, dict(self._extract_selector(*args, **kwargs), tag_name='p'))
def ps(self, *args, **kwargs):
from .elements.html_elements import ParagraphCollection
return ParagraphCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='p'))
def param(self, *args, **kwargs):
from .elements.html_elements import Param
return Param(self, dict(self._extract_selector(*args, **kwargs), tag_name='param'))
def params(self, *args, **kwargs):
from .elements.html_elements import ParamCollection
return ParamCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='param'))
def pre(self, *args, **kwargs):
from .elements.html_elements import Pre
return Pre(self, dict(self._extract_selector(*args, **kwargs), tag_name='pre'))
def pres(self, *args, **kwargs):
from .elements.html_elements import PreCollection
return PreCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='pre'))
def progress(self, *args, **kwargs):
from .elements.html_elements import Progress
return Progress(self, dict(self._extract_selector(*args, **kwargs), tag_name='progress'))
def progresses(self, *args, **kwargs):
from .elements.html_elements import ProgressCollection
return ProgressCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='progress'))
def q(self, *args, **kwargs):
from .elements.html_elements import Quote
return Quote(self, dict(self._extract_selector(*args, **kwargs), tag_name='q'))
def qs(self, *args, **kwargs):
from .elements.html_elements import QuoteCollection
return QuoteCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='q'))
def radio(self, *args, **kwargs):
from .elements.radio import Radio
return Radio(self, dict(self._extract_selector(*args, **kwargs), tag_name='input',
type='radio'))
def radios(self, *args, **kwargs):
from .elements.radio import RadioCollection
return RadioCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input', type='radio'))
def radio_set(self, *args, **kwargs):
    """Locate the set of radio buttons matching the selector."""
    from .elements.radio_set import RadioSet
    selector = self._extract_selector(*args, **kwargs)
    return RadioSet(self, dict(selector, tag_name='input', type='radio'))
def rp(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='rp'))
def rps(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='rp'))
def rt(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='rt'))
def rts(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='rt'))
def ruby(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='ruby'))
def rubies(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='ruby'))
def s(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='s'))
def ss(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='s'))
def samp(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='samp'))
def samps(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='samp'))
def script(self, *args, **kwargs):
from .elements.html_elements import Script
return Script(self, dict(self._extract_selector(*args, **kwargs), tag_name='script'))
def scripts(self, *args, **kwargs):
from .elements.html_elements import ScriptCollection
return ScriptCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='script'))
def section(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='section'))
def sections(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='section'))
def select(self, *args, **kwargs):
from .elements.select import Select
return Select(self, dict(self._extract_selector(*args, **kwargs), tag_name='select'))
select_list = select
def selects(self, *args, **kwargs):
from .elements.html_elements import SelectCollection
return SelectCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='select'))
select_lists = selects
# Accessors small..text_fields: same selector-merge pattern as above.
# Note that text_field/text_fields select on tag_name='input' with no
# type restriction -- type filtering, if any, happens elsewhere; not
# visible from this block.
def small(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='small'))
def smalls(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='small'))
def source(self, *args, **kwargs):
from .elements.html_elements import Source
return Source(self, dict(self._extract_selector(*args, **kwargs), tag_name='source'))
def sources(self, *args, **kwargs):
from .elements.html_elements import SourceCollection
return SourceCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='source'))
def span(self, *args, **kwargs):
from .elements.html_elements import Span
return Span(self, dict(self._extract_selector(*args, **kwargs), tag_name='span'))
def spans(self, *args, **kwargs):
from .elements.html_elements import SpanCollection
return SpanCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='span'))
def strong(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='strong'))
def strongs(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='strong'))
def style(self, *args, **kwargs):
from .elements.html_elements import Style
return Style(self, dict(self._extract_selector(*args, **kwargs), tag_name='style'))
def styles(self, *args, **kwargs):
from .elements.html_elements import StyleCollection
return StyleCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='style'))
def sub(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='sub'))
def subs(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='sub'))
def summary(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='summary'))
def summaries(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='summary'))
def sup(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='sup'))
def sups(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='sup'))
def table(self, *args, **kwargs):
from .elements.table import Table
return Table(self, dict(self._extract_selector(*args, **kwargs), tag_name='table'))
def tables(self, *args, **kwargs):
from .elements.html_elements import TableCollection
return TableCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='table'))
def tbody(self, *args, **kwargs):
from .elements.table_section import TableSection
return TableSection(self, dict(self._extract_selector(*args, **kwargs), tag_name='tbody'))
def tbodys(self, *args, **kwargs):
from .elements.html_elements import TableSectionCollection
return TableSectionCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='tbody'))
def td(self, *args, **kwargs):
from .elements.table_data_cell import TableDataCell
return TableDataCell(self, dict(self._extract_selector(*args, **kwargs), tag_name='td'))
def tds(self, *args, **kwargs):
from .elements.html_elements import TableDataCellCollection
return TableDataCellCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='td'))
def template(self, *args, **kwargs):
from .elements.html_elements import Template
return Template(self, dict(self._extract_selector(*args, **kwargs), tag_name='template'))
def templates(self, *args, **kwargs):
from .elements.html_elements import TemplateCollection
return TemplateCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='template'))
def textarea(self, *args, **kwargs):
from .elements.text_area import TextArea
return TextArea(self, dict(self._extract_selector(*args, **kwargs), tag_name='textarea'))
def textareas(self, *args, **kwargs):
from .elements.html_elements import TextAreaCollection
return TextAreaCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='textarea'))
def text_field(self, *args, **kwargs):
from .elements.text_field import TextField
return TextField(self, dict(self._extract_selector(*args, **kwargs), tag_name='input'))
def text_fields(self, *args, **kwargs):
from .elements.text_field import TextFieldCollection
return TextFieldCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='input'))
# Accessors tfoot..wbrs: same selector-merge pattern as above.
def tfoot(self, *args, **kwargs):
from .elements.table_section import TableSection
return TableSection(self, dict(self._extract_selector(*args, **kwargs), tag_name='tfoot'))
def tfoots(self, *args, **kwargs):
from .elements.html_elements import TableSectionCollection
return TableSectionCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='tfoot'))
def th(self, *args, **kwargs):
from .elements.html_elements import TableHeaderCell
return TableHeaderCell(self, dict(self._extract_selector(*args, **kwargs), tag_name='th'))
def ths(self, *args, **kwargs):
from .elements.html_elements import TableHeaderCellCollection
return TableHeaderCellCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='th'))
def thead(self, *args, **kwargs):
from .elements.table_section import TableSection
return TableSection(self, dict(self._extract_selector(*args, **kwargs), tag_name='thead'))
def theads(self, *args, **kwargs):
from .elements.html_elements import TableSectionCollection
return TableSectionCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='thead'))
def time(self, *args, **kwargs):
from .elements.html_elements import Time
return Time(self, dict(self._extract_selector(*args, **kwargs), tag_name='time'))
def times(self, *args, **kwargs):
from .elements.html_elements import TimeCollection
return TimeCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='time'))
def title(self, *args, **kwargs):
from .elements.html_elements import Title
return Title(self, dict(self._extract_selector(*args, **kwargs), tag_name='title'))
def titles(self, *args, **kwargs):
from .elements.html_elements import TitleCollection
return TitleCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='title'))
def tr(self, *args, **kwargs):
from .elements.table_row import TableRow
return TableRow(self, dict(self._extract_selector(*args, **kwargs), tag_name='tr'))
def trs(self, *args, **kwargs):
from .elements.html_elements import TableRowCollection
return TableRowCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='tr'))
def track(self, *args, **kwargs):
from .elements.html_elements import Track
return Track(self, dict(self._extract_selector(*args, **kwargs), tag_name='track'))
def tracks(self, *args, **kwargs):
from .elements.html_elements import TrackCollection
return TrackCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='track'))
def u(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='u'))
def us(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='u'))
def ul(self, *args, **kwargs):
from .elements.list import UList
return UList(self, dict(self._extract_selector(*args, **kwargs), tag_name='ul'))
def uls(self, *args, **kwargs):
from .elements.html_elements import UListCollection
return UListCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='ul'))
def var(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='var'))
# NOTE: 'vars' shadows the vars() builtin inside this class namespace;
# the name is kept for symmetry with the other plural accessors.
def vars(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='var'))
def video(self, *args, **kwargs):
from .elements.html_elements import Video
return Video(self, dict(self._extract_selector(*args, **kwargs), tag_name='video'))
def videos(self, *args, **kwargs):
from .elements.html_elements import VideoCollection
return VideoCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='video'))
def wbr(self, *args, **kwargs):
from .elements.html_elements import HTMLElement
return HTMLElement(self, dict(self._extract_selector(*args, **kwargs), tag_name='wbr'))
def wbrs(self, *args, **kwargs):
from .elements.html_elements import HTMLElementCollection
return HTMLElementCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='wbr'))
# SVG element accessors. Same selector-merge pattern as the HTML
# accessors above; camelCase tag names (foreignObject, linearGradient)
# are the literal SVG tag names.
def circle(self, *args, **kwargs):
from .elements.svg_elements import Circle
return Circle(self, dict(self._extract_selector(*args, **kwargs), tag_name='circle'))
def circles(self, *args, **kwargs):
from .elements.svg_elements import CircleCollection
return CircleCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='circle'))
def defs(self, *args, **kwargs):
from .elements.svg_elements import Defs
return Defs(self, dict(self._extract_selector(*args, **kwargs), tag_name='defs'))
def defss(self, *args, **kwargs):
from .elements.svg_elements import DefsCollection
return DefsCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='defs'))
def desc(self, *args, **kwargs):
from .elements.svg_elements import Desc
return Desc(self, dict(self._extract_selector(*args, **kwargs), tag_name='desc'))
def descs(self, *args, **kwargs):
from .elements.svg_elements import DescCollection
return DescCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='desc'))
def ellipse(self, *args, **kwargs):
from .elements.svg_elements import Ellipse
return Ellipse(self, dict(self._extract_selector(*args, **kwargs), tag_name='ellipse'))
def ellipses(self, *args, **kwargs):
from .elements.svg_elements import EllipseCollection
return EllipseCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='ellipse'))
def foreign_object(self, *args, **kwargs):
from .elements.svg_elements import ForeignObject
return ForeignObject(self, dict(self._extract_selector(*args, **kwargs),
tag_name='foreignObject'))
def foreign_objects(self, *args, **kwargs):
from .elements.svg_elements import ForeignObjectCollection
return ForeignObjectCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='foreignObject'))
def g(self, *args, **kwargs):
from .elements.svg_elements import G
return G(self, dict(self._extract_selector(*args, **kwargs), tag_name='g'))
def gs(self, *args, **kwargs):
from .elements.svg_elements import GCollection
return GCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='g'))
def line(self, *args, **kwargs):
from .elements.svg_elements import Line
return Line(self, dict(self._extract_selector(*args, **kwargs), tag_name='line'))
def lines(self, *args, **kwargs):
from .elements.svg_elements import LineCollection
return LineCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='line'))
def linear_gradient(self, *args, **kwargs):
from .elements.svg_elements import LinearGradient
return LinearGradient(self, dict(self._extract_selector(*args, **kwargs),
tag_name='linearGradient'))
def linear_gradients(self, *args, **kwargs):
from .elements.svg_elements import LinearGradientCollection
return LinearGradientCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='linearGradient'))
def marker(self, *args, **kwargs):
from .elements.svg_elements import Marker
return Marker(self, dict(self._extract_selector(*args, **kwargs), tag_name='marker'))
def markers(self, *args, **kwargs):
from .elements.svg_elements import MarkerCollection
return MarkerCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='marker'))
def metadata(self, *args, **kwargs):
from .elements.svg_elements import Metadata
return Metadata(self, dict(self._extract_selector(*args, **kwargs), tag_name='metadata'))
def metadatas(self, *args, **kwargs):
from .elements.svg_elements import MetadataCollection
return MetadataCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='metadata'))
def path(self, *args, **kwargs):
from .elements.svg_elements import Path
return Path(self, dict(self._extract_selector(*args, **kwargs), tag_name='path'))
def paths(self, *args, **kwargs):
from .elements.svg_elements import PathCollection
return PathCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='path'))
def pattern(self, *args, **kwargs):
from .elements.svg_elements import Pattern
return Pattern(self, dict(self._extract_selector(*args, **kwargs), tag_name='pattern'))
def patterns(self, *args, **kwargs):
from .elements.svg_elements import PatternCollection
return PatternCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='pattern'))
def polygon(self, *args, **kwargs):
from .elements.svg_elements import Polygon
return Polygon(self, dict(self._extract_selector(*args, **kwargs), tag_name='polygon'))
def polygons(self, *args, **kwargs):
from .elements.svg_elements import PolygonCollection
return PolygonCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='polygon'))
def polyline(self, *args, **kwargs):
    """Return the first Polyline SVG element matching the given selector.

    Bug fix: this accessor previously merged ``tag_name='polygon'`` (a
    copy-paste from the ``polygon`` accessor above), so it could never
    locate a ``<polyline>`` element. The selector now correctly targets
    ``tag_name='polyline'``.
    """
    from .elements.svg_elements import Polyline
    return Polyline(self, dict(self._extract_selector(*args, **kwargs),
                               tag_name='polyline'))
def polylines(self, *args, **kwargs):
    """Return a PolylineCollection matching the given selector.

    Same fix as :meth:`polyline`: the merged selector previously used
    ``tag_name='polygon'``; it now uses ``tag_name='polyline'``.
    """
    from .elements.svg_elements import PolylineCollection
    return PolylineCollection(self, dict(self._extract_selector(*args, **kwargs),
                                         tag_name='polyline'))
# SVG accessors radial_gradient..views: same selector-merge pattern.
def radial_gradient(self, *args, **kwargs):
from .elements.svg_elements import RadialGradient
return RadialGradient(self, dict(self._extract_selector(*args, **kwargs),
tag_name='radialGradient'))
def radial_gradients(self, *args, **kwargs):
from .elements.svg_elements import RadialGradientCollection
return RadialGradientCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='radialGradient'))
def stop(self, *args, **kwargs):
from .elements.svg_elements import Stop
return Stop(self, dict(self._extract_selector(*args, **kwargs), tag_name='stop'))
def stops(self, *args, **kwargs):
from .elements.svg_elements import StopCollection
return StopCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='stop'))
def svg(self, *args, **kwargs):
from .elements.svg_elements import SVG
return SVG(self, dict(self._extract_selector(*args, **kwargs), tag_name='svg'))
def svgs(self, *args, **kwargs):
from .elements.svg_elements import SVGCollection
return SVGCollection(self, dict(self._extract_selector(*args, **kwargs), tag_name='svg'))
def switch(self, *args, **kwargs):
from .elements.svg_elements import Switch
return Switch(self, dict(self._extract_selector(*args, **kwargs), tag_name='switch'))
def switches(self, *args, **kwargs):
from .elements.svg_elements import SwitchCollection
return SwitchCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='switch'))
def symbol(self, *args, **kwargs):
from .elements.svg_elements import Symbol
return Symbol(self, dict(self._extract_selector(*args, **kwargs), tag_name='symbol'))
def symbols(self, *args, **kwargs):
from .elements.svg_elements import SymbolCollection
return SymbolCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='symbol'))
def text_path(self, *args, **kwargs):
from .elements.svg_elements import TextPath
return TextPath(self, dict(self._extract_selector(*args, **kwargs), tag_name='textPath'))
def text_paths(self, *args, **kwargs):
from .elements.svg_elements import TextPathCollection
return TextPathCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='textPath'))
def tspan(self, *args, **kwargs):
from .elements.svg_elements import TSpan
return TSpan(self, dict(self._extract_selector(*args, **kwargs), tag_name='tspan'))
def tspans(self, *args, **kwargs):
from .elements.svg_elements import TSpanCollection
return TSpanCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='tspan'))
def view(self, *args, **kwargs):
from .elements.svg_elements import View
return View(self, dict(self._extract_selector(*args, **kwargs), tag_name='view'))
def views(self, *args, **kwargs):
from .elements.svg_elements import ViewCollection
return ViewCollection(self, dict(self._extract_selector(*args, **kwargs),
tag_name='view'))
| 47.979892
| 100
| 0.631726
| 6,682
| 62,038
| 5.694104
| 0.070638
| 0.149022
| 0.141821
| 0.186606
| 0.810792
| 0.803958
| 0.795968
| 0.789555
| 0.625131
| 0.612358
| 0
| 0.000685
| 0.246591
| 62,038
| 1,292
| 101
| 48.017028
| 0.81335
| 0.003014
| 0
| 0.17002
| 0
| 0
| 0.024577
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.28672
| 0
| 0.865191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
8a5361b8129cbfe6d988ee96255a9dc06ccfb582
| 566,586
|
py
|
Python
|
src/genie/libs/parser/junos/tests/test_show_interface.py
|
danielgraziano/genieparser
|
74d5e1ded9794561af1ac3284307c58365617673
|
[
"Apache-2.0"
] | 1
|
2020-12-01T00:45:34.000Z
|
2020-12-01T00:45:34.000Z
|
src/genie/libs/parser/junos/tests/test_show_interface.py
|
dalwar23/genieparser
|
a9df45d3ee23f107bfb55915068e90782f92fc99
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/junos/tests/test_show_interface.py
|
dalwar23/genieparser
|
a9df45d3ee23f107bfb55915068e90782f92fc99
|
[
"Apache-2.0"
] | 2
|
2021-02-12T21:42:30.000Z
|
2021-02-12T21:47:51.000Z
|
import unittest
from unittest.mock import Mock
from pyats.topology import Device
from genie.metaparser.util.exceptions import (
SchemaEmptyParserError,
SchemaMissingKeyError,
)
from genie.libs.parser.junos.show_interface import (ShowInterfacesTerse,
ShowInterfacesTerseMatch,
ShowInterfacesDescriptions,
ShowInterfaces,
ShowInterfacesPolicersInterface,
ShowInterfacesStatistics,
ShowInterfacesQueue,
ShowInterfacesExtensive)
#############################################################################
# Unit tests for: show interfaces terse [| match <interface>]
#############################################################################
class test_show_interfaces_terse(unittest.TestCase):
device = Device(name="aDevice")
empty_output = {"execute.return_value": ""}
golden_parsed_output = {
"em1": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"em1.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {
"inet": {
"10.0.0.4/8": {"local": "10.0.0.4/8"},
"172.16.64.1/2": {"local": "172.16.64.1/2"},
"172.16.64.4/2": {"local": "172.16.64.4/2"},
},
"inet6": {
"fe80::250:56ff:fe82:ba52/64": {
"local": "fe80::250:56ff:fe82:ba52/64"
},
"2001:db8:8d82:0:a::4/64": {"local": "2001:db8:8d82:0:a::4/64"},
},
"tnp": {"0x4": {"local": "0x4"}},
},
},
"fxp0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"fxp0.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"inet": {"172.25.192.114/24": {"local": "172.25.192.114/24"}}},
},
"ge-0/0/0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"ge-0/0/0.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {
"inet": {"10.0.1.1/24": {"local": "10.0.1.1/24"}},
"multiservice": {},
},
},
"ge-0/0/1": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"ge-0/0/1.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {
"inet": {"10.0.2.1/24": {"local": "10.0.2.1/24"}},
"multiservice": {},
},
},
"ge-0/0/2": {
"admin_state": "up",
"enabled": True,
"link_state": "down",
"oper_status": "down",
},
"lc-0/0/0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"lc-0/0/0.32769": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"vpls": {}},
},
"lo0.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {
"inet": {
"10.1.1.1": {"local": "10.1.1.1", "remote": "0/0"},
"10.11.11.11": {"local": "10.11.11.11", "remote": "0/0"},
}
},
},
"lo0.16384": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {
"inet": {"127.0.0.1": {"local": "127.0.0.1", "remote": "0/0"}}
},
},
"lo0.16385": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"inet": {}},
},
"pfe-0/0/0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"pfe-0/0/0.16383": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"inet": {}, "inet6": {}},
},
"pfh-0/0/0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"pfh-0/0/0.16383": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"inet": {}},
},
"pfh-0/0/0.16384": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"inet": {}},
},
}
golden_output = {
"execute.return_value": """
root@junos_vmx1> show interfaces terse
Interface Admin Link Proto Local Remote
ge-0/0/0 up up
ge-0/0/0.0 up up inet 10.0.1.1/24
multiservice
lc-0/0/0 up up
lc-0/0/0.32769 up up vpls
pfe-0/0/0 up up
pfe-0/0/0.16383 up up inet
inet6
pfh-0/0/0 up up
pfh-0/0/0.16383 up up inet
pfh-0/0/0.16384 up up inet
ge-0/0/1 up up
ge-0/0/1.0 up up inet 10.0.2.1/24
multiservice
ge-0/0/2 up down
em1 up up
em1.0 up up inet 10.0.0.4/8
172.16.64.1/2
172.16.64.4/2
inet6 fe80::250:56ff:fe82:ba52/64
2001:db8:8d82:0:a::4/64
tnp 0x4
fxp0 up up
fxp0.0 up up inet 172.25.192.114/24
lo0.0 up up inet 10.1.1.1 --> 0/0
10.11.11.11 --> 0/0
lo0.16384 up up inet 127.0.0.1 --> 0/0
lo0.16385 up up inet
"""
}
golden_output_interface = {
"execute.return_value": """
root@junos_vmx1 > show interfaces em1.0 terse
em1.0 up up inet 10.0.0.4/8
172.16.64.1/2
172.16.64.4/2
inet6 fe80::250:56ff:fe82:ba52/64
2001:db8:8d82:0:a::4/64
tnp 0x4
"""
}
golden_parsed_output_interface = {
"em1.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {
"inet": {
"10.0.0.4/8": {"local": "10.0.0.4/8"},
"172.16.64.1/2": {"local": "172.16.64.1/2"},
"172.16.64.4/2": {"local": "172.16.64.4/2"},
},
"inet6": {
"fe80::250:56ff:fe82:ba52/64": {
"local": "fe80::250:56ff:fe82:ba52/64"
},
"2001:db8:8d82:0:a::4/64": {"local": "2001:db8:8d82:0:a::4/64"},
},
"tnp": {"0x4": {"local": "0x4"}},
},
}
}
# Empty device output must raise SchemaEmptyParserError.
def test_empty(self):
self.device1 = Mock(**self.empty_output)
interface_obj = ShowInterfacesTerse(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
interface_obj.parse()
# Full 'show interfaces terse' output parses into the expected dict.
def test_golden(self):
self.device = Mock(**self.golden_output)
interface_obj = ShowInterfacesTerse(device=self.device)
parsed_output = interface_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
# Single-interface variant ('show interfaces em1.0 terse') parses correctly.
def test_golden_interface(self):
self.device = Mock(**self.golden_output_interface)
interface_obj = ShowInterfacesTerse(device=self.device)
parsed_output = interface_obj.parse(interface="em1.0")
self.assertEqual(parsed_output, self.golden_parsed_output_interface)
class test_show_interfaces_terse_match(unittest.TestCase):
device = Device(name="aDevice")
empty_output = {"execute.return_value": ""}
golden_parsed_output = {
"fxp0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
},
"fxp0.0": {
"admin_state": "up",
"enabled": True,
"link_state": "up",
"oper_status": "up",
"protocol": {"inet": {"172.25.192.114/24": {"local": "172.25.192.114/24"}}},
},
}
golden_output = {
"execute.return_value": """
root@junos_vmx1> show interfaces terse | match fxp0
fxp0 up up
fxp0.0 up up inet 172.25.192.114/24
"""
}
# Empty output for the '| match' variant must raise SchemaEmptyParserError.
def test_empty(self):
self.device1 = Mock(**self.empty_output)
interface_obj = ShowInterfacesTerseMatch(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
interface_obj.parse(interface="fxp0")
# 'show interfaces terse | match fxp0' parses into the expected dict.
def test_golden(self):
self.device = Mock(**self.golden_output)
interface_obj = ShowInterfacesTerseMatch(device=self.device)
parsed_output = interface_obj.parse(interface="fxp0")
self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowInterfacesDescriptions(unittest.TestCase):
device = Device(name="aDevice")
empty_output = {"execute.return_value": ""}
maxDiff = None
golden_parsed_output = {
"interface-information": {
"physical-interface": [
{
"admin-status": "up",
"description": "none/100G/in/hktGCS002_ge-0/0/0",
"name": "ge-0/0/0",
"oper-status": "up",
},
{
"admin-status": "up",
"description": "YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100",
"name": "ge-0/0/1",
"oper-status": "up",
},
{
"admin-status": "up",
"description": "ve-hkgasr01_Gi2[DefaultCost1000]",
"name": "ge-0/0/2",
"oper-status": "up",
},
]
}
}
golden_output = {
"execute.return_value": """
show interfaces descriptions
Interface Admin Link Description
ge-0/0/0 up up none/100G/in/hktGCS002_ge-0/0/0
ge-0/0/1 up up YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100
ge-0/0/2 up up ve-hkgasr01_Gi2[DefaultCost1000]
"""
}
# Empty 'show interfaces descriptions' output must raise SchemaEmptyParserError.
def test_empty(self):
self.device1 = Mock(**self.empty_output)
interface_obj = ShowInterfacesDescriptions(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
interface_obj.parse()
# Golden output parses into the expected physical-interface list.
def test_golden(self):
self.device = Mock(**self.golden_output)
interface_obj = ShowInterfacesDescriptions(device=self.device)
parsed_output = interface_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowInterfaces(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output = {
"interface-information": {
"physical-interface": [
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:56:b6",
"description": "none/100G/in/hktGCS002_ge-0/0/0",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:56:b6",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:19 UTC (29w6d 18:56 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-no-redirects": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-broadcast": "10.189.5.95",
"ifa-destination": "10.189.5.92/30",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "10.189.5.93"
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "2001:db8:223c:2c16::/64",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "2001:db8:223c:2c16::1"
},
{
"ifa-destination": "fe80::/64",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "fe80::250:56ff:feff:56b6"
}
],
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "mpls",
"maximum-labels": "3",
"mtu": "1488"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "multiservice",
"mtu": "Unlimited"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "333",
"name": "ge-0/0/0.0",
"snmp-index": "606",
"traffic-statistics": {
"input-packets": "133657033",
"output-packets": "129243982"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/0",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "2952",
"input-pps": "5",
"output-bps": "3080",
"output-pps": "3"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "vpls",
"mtu": "Unlimited"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "329",
"logical-interface-bandwidth": "0",
"name": "lc-0/0/0.32769",
"snmp-index": "520",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"name": "lc-0/0/0",
"speed": "800mbps",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet6",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "330",
"logical-interface-bandwidth": "0",
"name": "pfe-0/0/0.16383",
"snmp-index": "523",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"name": "pfe-0/0/0",
"speed": "800mbps",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "331",
"logical-interface-bandwidth": "0",
"name": "pfh-0/0/0.16383",
"snmp-index": "524",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "332",
"logical-interface-bandwidth": "0",
"name": "pfh-0/0/0.16384",
"snmp-index": "525",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"name": "pfh-0/0/0",
"speed": "800mbps",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:37:f9",
"description": "YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:37:f9",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:19 UTC (29w6d 18:56 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-no-redirects": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-broadcast": "10.169.14.123",
"ifa-destination": "10.169.14.120/30",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "10.169.14.122"
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "2001:db8:eb18:6337::/64",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "2001:db8:eb18:6337::2"
},
{
"ifa-destination": "fe80::/64",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "fe80::250:56ff:feff:37f9"
}
],
"intf-curr-cnt": "2",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "mpls",
"maximum-labels": "3",
"mtu": "1488"
},
{
"address-family-name": "multiservice",
"mtu": "Unlimited"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "334",
"name": "ge-0/0/1.0",
"snmp-index": "605",
"traffic-statistics": {
"input-packets": "376821627",
"output-packets": "370477594"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/1",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "3696",
"input-pps": "6",
"output-bps": "7736",
"output-pps": "9"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:1e:ba",
"description": "ve-hkgasr01_Gi2[DefaultCost1000]",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:1e:ba",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2020-03-05 16:04:34 UTC (2w6d 12:00 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-broadcast": "10.19.198.27",
"ifa-destination": "10.19.198.24/30",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "10.19.198.25"
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "mpls",
"maximum-labels": "3",
"mtu": "1488"
},
{
"address-family-name": "multiservice",
"mtu": "Unlimited"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "336",
"name": "ge-0/0/2.0",
"snmp-index": "536",
"traffic-statistics": {
"input-packets": "210359939",
"output-packets": "222589463"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/2",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "928",
"input-pps": "1",
"output-bps": "800",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:93:cb",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:93:cb",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-10-25 08:50:18 UTC (21w5d 19:15 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-broadcast": "10.55.0.255",
"ifa-destination": "100.0.0/24",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "10.55.0.254"
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "multiservice",
"mtu": "Unlimited"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "335",
"name": "ge-0/0/3.0",
"snmp-index": "537",
"traffic-statistics": {
"input-packets": "14609",
"output-packets": "17416"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/3",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:3e:28",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:3e:28",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/4",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1d",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1d",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/5",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1e",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1e",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/6",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1f",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1f",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/7",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:20",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:20",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/8",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:21",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:21",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/9",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:01:29",
"hardware-physical-address": "2c:6b:f5:ff:01:29",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"mtu": "9192",
"name": "cbp0",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"name": "demux0",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"name": "dsc",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "00:50:56:ff:e2:c1",
"hardware-physical-address": "00:50:56:ff:e2:c1",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"interface-flapped": {
"#text": "2019-08-29 09:03:11 UTC (29w6d 19:02 ago)"
},
"link-level-type": "Ethernet",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-broadcast": "10.255.255.255",
"ifa-destination": "10/8",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "10.0.0.4"
},
{
"ifa-broadcast": "172.16.16.255",
"ifa-destination": "128/2",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-kernel": True,
"ifaf-preferred": True
},
"ifa-local": "172.16.64.1"
},
{
"ifa-broadcast": "172.16.16.255",
"ifa-destination": "128/2",
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-primary": True,
"ifaf-primary": True
},
"ifa-local": "172.16.64.4"
}
],
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "fe80::/64",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "fe80::250:56ff:feff:e2c1"
},
{
"ifa-destination": "2001:db8:8d82::/64",
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "0x4"
}
],
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True,
"ifff-primary": True
},
"address-family-name": "tnp",
"mtu": "1500"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4000000"
},
"local-index": "3",
"name": "em1.0",
"snmp-index": "24",
"traffic-statistics": {
"input-packets": "724625563",
"output-packets": "793953088"
}
}
],
"mtu": "1514",
"name": "em1",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "VxLAN-Tunnel-Endpoint",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "esi",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti0",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti1",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti2",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti3",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti4",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti5",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti6",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "fti7",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "00:50:56:ff:0a:95",
"hardware-physical-address": "00:50:56:ff:0a:95",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"interface-flapped": {
"#text": "2019-08-29 09:03:11 UTC (29w6d 19:02 ago)"
},
"link-level-type": "Ethernet",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-broadcast": "10.1.0.255",
"ifa-destination": "1.0.0/24",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "10.1.0.101"
},
"intf-curr-cnt": "2",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4000000"
},
"local-index": "4",
"name": "fxp0.0",
"snmp-index": "13",
"traffic-statistics": {
"input-packets": "563129",
"output-packets": "805208"
}
}
],
"mtu": "1514",
"name": "fxp0",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "GRE",
"mtu": "Unlimited",
"name": "gre",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "IP-over-IP",
"mtu": "Unlimited",
"name": "ipip",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:09",
"hardware-physical-address": "2c:6b:f5:ff:08:09",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"mtu": "1514",
"name": "irb",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:d8",
"hardware-physical-address": "2c:6b:f5:ff:08:d8",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-broadcast": "172.16.16.255",
"ifa-destination": "128/2",
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-preferred": True,
"ifaf-is-primary": True,
"ifaf-primary": True
},
"ifa-local": "172.16.64.127"
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1514",
"new-hold-limit": "75000"
}
],
"encapsulation": "unknown",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x24004000"
},
"local-index": "325",
"logical-interface-bandwidth": "1Gbps",
"name": "jsrv.1",
"snmp-index": "514",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"mtu": "1514",
"name": "jsrv",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-loopback": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-no-redirects": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-primary": True
},
"ifa-local": "10.189.5.252"
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-name": "inet6",
"interface-address": {
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-primary": True
},
"ifa-local": "127.0.0.1"
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "320",
"name": "lo0.0",
"snmp-index": "16",
"traffic-statistics": {
"input-packets": "83",
"output-packets": "83"
}
},
{
"address-family": [
{
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "322",
"name": "lo0.16384",
"snmp-index": "21",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"address-family": [
{
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "321",
"name": "lo0.16385",
"snmp-index": "22",
"traffic-statistics": {
"input-packets": "33920495",
"output-packets": "33920495"
}
}
],
"name": "lo0",
"traffic-statistics": {
"input-packets": "33920578",
"output-packets": "33920578"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "LSI",
"mtu": "Unlimited",
"name": "lsi",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "GRE",
"mtu": "Unlimited",
"name": "mtun",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "PIM-Decapsulator",
"mtu": "Unlimited",
"name": "pimd",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "PIM-Encapsulator",
"mtu": "Unlimited",
"name": "pime",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:c8",
"hardware-physical-address": "2c:6b:f5:ff:08:c8",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"mtu": "9192",
"name": "pip0",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"link-level-type": "PPPoE",
"link-type": "Full-Duplex",
"mtu": "1532",
"name": "pp0"
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Remote-BEB",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "rbeb",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Interface-Specific",
"mtu": "Unlimited",
"name": "tap",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "VxLAN-Tunnel-Endpoint",
"link-type": "Full-Duplex",
"mtu": "Unlimited",
"name": "vtep",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
]
}
}
# Golden raw device output for the `show interfaces` command: the literal CLI
# text the test feeds to the parser under test.  The single key
# "execute.return_value" matches the attribute path of a unittest.mock.Mock,
# so the mocked device's execute() returns exactly this string
# (presumably consumed as Mock(**golden_output) — standard Genie test
# pattern; confirm against the enclosing test class, which is outside this
# view).  NOTE(review): this is captured fixture data — every character,
# including internal spacing inside the triple-quoted string, is significant
# to the parser test and must not be edited.
golden_output = {
"execute.return_value": """
show interfaces
Physical interface: ge-0/0/0, Enabled, Physical link is Up
Interface index: 148, SNMP ifIndex: 526
Description: none/100G/in/hktGCS002_ge-0/0/0
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 00:50:56:ff:56:b6, Hardware address: 00:50:56:ff:56:b6
Last flapped : 2019-08-29 09:09:19 UTC (29w6d 18:56 ago)
Input rate : 2952 bps (5 pps)
Output rate : 3080 bps (3 pps)
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Logical interface ge-0/0/0.0 (Index 333) (SNMP ifIndex 606)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Input packets : 133657033
Output packets: 129243982
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: No-Redirects, Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.189.5.92/30, Local: 10.189.5.93, Broadcast: 10.189.5.95
Protocol inet6, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Is-Primary
Addresses, Flags: Is-Preferred Is-Primary
Destination: 2001:db8:223c:2c16::/64, Local: 2001:db8:223c:2c16::1
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::250:56ff:feff:56b6
Protocol mpls, MTU: 1488, Maximum labels: 3
Flags: Is-Primary
Protocol multiservice, MTU: Unlimited
Flags: Is-Primary
Physical interface: lc-0/0/0, Enabled, Physical link is Up
Interface index: 145, SNMP ifIndex: 519
Speed: 800mbps
Device flags : Present Running
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Logical interface lc-0/0/0.32769 (Index 329) (SNMP ifIndex 520)
Flags: Up Encapsulation: ENET2
Bandwidth: 0
Input packets : 0
Output packets: 0
Protocol vpls, MTU: Unlimited
Flags: Is-Primary
Physical interface: pfe-0/0/0, Enabled, Physical link is Up
Interface index: 147, SNMP ifIndex: 522
Speed: 800mbps
Device flags : Present Running
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Logical interface pfe-0/0/0.16383 (Index 330) (SNMP ifIndex 523)
Flags: Up SNMP-Traps Encapsulation: ENET2
Bandwidth: 0
Input packets : 0
Output packets: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Flags: None
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Flags: None
Physical interface: pfh-0/0/0, Enabled, Physical link is Up
Interface index: 146, SNMP ifIndex: 521
Speed: 800mbps
Device flags : Present Running
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Logical interface pfh-0/0/0.16383 (Index 331) (SNMP ifIndex 524)
Flags: Up SNMP-Traps Encapsulation: ENET2
Bandwidth: 0
Input packets : 0
Output packets: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Flags: None
Logical interface pfh-0/0/0.16384 (Index 332) (SNMP ifIndex 525)
Flags: Up SNMP-Traps Encapsulation: ENET2
Bandwidth: 0
Input packets : 0
Output packets: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Is-Primary
Physical interface: ge-0/0/1, Enabled, Physical link is Up
Interface index: 149, SNMP ifIndex: 527
Description: YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 00:50:56:ff:37:f9, Hardware address: 00:50:56:ff:37:f9
Last flapped : 2019-08-29 09:09:19 UTC (29w6d 18:56 ago)
Input rate : 3696 bps (6 pps)
Output rate : 7736 bps (9 pps)
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Logical interface ge-0/0/1.0 (Index 334) (SNMP ifIndex 605)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Input packets : 376821627
Output packets: 370477594
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: No-Redirects, Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.169.14.120/30, Local: 10.169.14.122, Broadcast: 10.169.14.123
Protocol inet6, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 2, Curr new hold cnt: 0, NH drop cnt: 0
Addresses, Flags: Is-Preferred Is-Primary
Destination: 2001:db8:eb18:6337::/64, Local: 2001:db8:eb18:6337::2
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::250:56ff:feff:37f9
Protocol mpls, MTU: 1488, Maximum labels: 3
Protocol multiservice, MTU: Unlimited
Physical interface: ge-0/0/2, Enabled, Physical link is Up
Interface index: 150, SNMP ifIndex: 528
Description: ve-hkgasr01_Gi2[DefaultCost1000]
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 00:50:56:ff:1e:ba, Hardware address: 00:50:56:ff:1e:ba
Last flapped : 2020-03-05 16:04:34 UTC (2w6d 12:00 ago)
Input rate : 928 bps (1 pps)
Output rate : 800 bps (0 pps)
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Logical interface ge-0/0/2.0 (Index 336) (SNMP ifIndex 536)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Input packets : 210359939
Output packets: 222589463
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.19.198.24/30, Local: 10.19.198.25, Broadcast: 10.19.198.27
Protocol mpls, MTU: 1488, Maximum labels: 3
Protocol multiservice, MTU: Unlimited
Physical interface: ge-0/0/3, Enabled, Physical link is Up
Interface index: 151, SNMP ifIndex: 529
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 00:50:56:ff:93:cb, Hardware address: 00:50:56:ff:93:cb
Last flapped : 2019-10-25 08:50:18 UTC (21w5d 19:15 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Logical interface ge-0/0/3.0 (Index 335) (SNMP ifIndex 537)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Input packets : 14609
Output packets: 17416
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 100.0.0/24, Local: 10.55.0.254, Broadcast: 10.55.0.255
Protocol multiservice, MTU: Unlimited
Physical interface: ge-0/0/4, Enabled, Physical link is Down
Interface index: 152, SNMP ifIndex: 530
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 00:50:56:ff:3e:28, Hardware address: 00:50:56:ff:3e:28
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 18:55 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Physical interface: ge-0/0/5, Enabled, Physical link is Down
Interface index: 153, SNMP ifIndex: 531
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 2c:6b:f5:ff:01:1d, Hardware address: 2c:6b:f5:ff:01:1d
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 18:55 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Physical interface: ge-0/0/6, Enabled, Physical link is Down
Interface index: 154, SNMP ifIndex: 532
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 2c:6b:f5:ff:01:1e, Hardware address: 2c:6b:f5:ff:01:1e
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 18:55 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Physical interface: ge-0/0/7, Enabled, Physical link is Down
Interface index: 155, SNMP ifIndex: 533
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 2c:6b:f5:ff:01:1f, Hardware address: 2c:6b:f5:ff:01:1f
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 18:55 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Physical interface: ge-0/0/8, Enabled, Physical link is Down
Interface index: 156, SNMP ifIndex: 534
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 2c:6b:f5:ff:01:20, Hardware address: 2c:6b:f5:ff:01:20
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 18:55 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Physical interface: ge-0/0/9, Enabled, Physical link is Down
Interface index: 157, SNMP ifIndex: 535
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 2c:6b:f5:ff:01:21, Hardware address: 2c:6b:f5:ff:01:21
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 18:55 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
Interface transmit statistics: Disabled
Physical interface: cbp0, Enabled, Physical link is Up
Interface index: 129, SNMP ifIndex: 501
Type: Ethernet, Link-level type: Ethernet, MTU: 9192
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Current address: 2c:6b:f5:ff:01:29, Hardware address: 2c:6b:f5:ff:01:29
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: demux0, Enabled, Physical link is Up
Interface index: 128, SNMP ifIndex: 502
Type: Software-Pseudo, MTU: 9192, Clocking: 1
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: dsc, Enabled, Physical link is Up
Interface index: 5, SNMP ifIndex: 5
Type: Software-Pseudo, MTU: Unlimited
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: em1, Enabled, Physical link is Up
Interface index: 65, SNMP ifIndex: 23
Type: Ethernet, Link-level type: Ethernet, MTU: 1514
Device flags : Present Running
Interface flags: SNMP-Traps
Current address: 00:50:56:ff:e2:c1, Hardware address: 00:50:56:ff:e2:c1
Last flapped : 2019-08-29 09:03:11 UTC (29w6d 19:02 ago)
Input packets : 0
Output packets: 0
Logical interface em1.0 (Index 3) (SNMP ifIndex 24)
Flags: Up SNMP-Traps 0x4000000 Encapsulation: ENET2
Input packets : 724625563
Output packets: 793953088
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Is-Primary
Addresses, Flags: Is-Preferred
Destination: 10/8, Local: 10.0.0.4, Broadcast: 10.255.255.255
Addresses, Flags: Preferred Kernel Is-Preferred
Destination: 128/2, Local: 172.16.64.1, Broadcast: 172.16.16.255
Addresses, Flags: Primary Is-Default Is-Primary
Destination: 128/2, Local: 172.16.64.4, Broadcast: 172.16.16.255
Protocol inet6, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Is-Primary
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::250:56ff:feff:e2c1
Addresses, Flags: Is-Default Is-Preferred Is-Primary
Destination: 2001:db8:8d82::/64, Local: 2001:db8:8d82::a:0:0:4
Protocol tnp, MTU: 1500
Flags: Primary, Is-Primary
Addresses
Local: 0x4
Physical interface: esi, Enabled, Physical link is Up
Interface index: 134, SNMP ifIndex: 503
Type: Software-Pseudo, Link-level type: VxLAN-Tunnel-Endpoint, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti0, Enabled, Physical link is Up
Interface index: 136, SNMP ifIndex: 504
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti1, Enabled, Physical link is Up
Interface index: 137, SNMP ifIndex: 505
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti2, Enabled, Physical link is Up
Interface index: 138, SNMP ifIndex: 506
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti3, Enabled, Physical link is Up
Interface index: 139, SNMP ifIndex: 507
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti4, Enabled, Physical link is Up
Interface index: 140, SNMP ifIndex: 508
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti5, Enabled, Physical link is Up
Interface index: 141, SNMP ifIndex: 509
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti6, Enabled, Physical link is Up
Interface index: 142, SNMP ifIndex: 510
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fti7, Enabled, Physical link is Up
Interface index: 143, SNMP ifIndex: 511
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: fxp0, Enabled, Physical link is Up
Interface index: 64, SNMP ifIndex: 1
Type: Ethernet, Link-level type: Ethernet, MTU: 1514
Device flags : Present Running
Interface flags: SNMP-Traps
Current address: 00:50:56:ff:0a:95, Hardware address: 00:50:56:ff:0a:95
Last flapped : 2019-08-29 09:03:11 UTC (29w6d 19:02 ago)
Input packets : 0
Output packets: 0
Logical interface fxp0.0 (Index 4) (SNMP ifIndex 13)
Flags: Up SNMP-Traps 0x4000000 Encapsulation: ENET2
Input packets : 563129
Output packets: 805208
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 2, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Sendbcast-pkt-to-re, Is-Primary
Addresses, Flags: Is-Preferred Is-Primary
Destination: 1.0.0/24, Local: 10.1.0.101, Broadcast: 10.1.0.255
Physical interface: gre, Enabled, Physical link is Up
Interface index: 10, SNMP ifIndex: 8
Type: GRE, Link-level type: GRE, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Input packets : 0
Output packets: 0
Physical interface: ipip, Enabled, Physical link is Up
Interface index: 11, SNMP ifIndex: 9
Type: IPIP, Link-level type: IP-over-IP, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Input packets : 0
Output packets: 0
Physical interface: irb, Enabled, Physical link is Up
Interface index: 132, SNMP ifIndex: 512
Type: Ethernet, Link-level type: Ethernet, MTU: 1514
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Current address: 2c:6b:f5:ff:08:09, Hardware address: 2c:6b:f5:ff:08:09
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: jsrv, Enabled, Physical link is Up
Interface index: 144, SNMP ifIndex: 513
Type: Ethernet, Link-level type: Ethernet, MTU: 1514
Device flags : Present Running
Link type : Full-Duplex
Link flags : None
Current address: 2c:6b:f5:ff:08:d8, Hardware address: 2c:6b:f5:ff:08:d8
Last flapped : Never
Input packets : 0
Output packets: 0
Logical interface jsrv.1 (Index 325) (SNMP ifIndex 514)
Flags: Up 0x24004000 Encapsulation: unknown
Bandwidth: 1Gbps
Routing Instance: None Bridging Domain: None
Input packets : 0
Output packets: 0
Protocol inet, MTU: 1514
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Flags: Is-Primary
Addresses, Flags: Primary Is-Default Is-Preferred Is-Primary
Destination: 128/2, Local: 172.16.64.127, Broadcast: 172.16.16.255
Physical interface: lo0, Enabled, Physical link is Up
Interface index: 6, SNMP ifIndex: 6
Type: Loopback, MTU: Unlimited
Device flags : Present Running Loopback
Interface flags: SNMP-Traps
Link flags : None
Last flapped : Never
Input packets : 33920578
Output packets: 33920578
Logical interface lo0.0 (Index 320) (SNMP ifIndex 16)
Flags: SNMP-Traps Encapsulation: Unspecified
Input packets : 83
Output packets: 83
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Flags: No-Redirects, Sendbcast-pkt-to-re
Addresses, Flags: Is-Default Is-Primary
Local: 10.189.5.252
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Addresses, Flags: Is-Default Is-Primary
Local: 2001:db8:223c:ca45::b
Local: fe80::250:560f:fc8d:7c08
Logical interface lo0.16384 (Index 322) (SNMP ifIndex 21)
Flags: SNMP-Traps Encapsulation: Unspecified
Input packets : 0
Output packets: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Addresses
Local: 127.0.0.1
Logical interface lo0.16385 (Index 321) (SNMP ifIndex 22)
Flags: SNMP-Traps Encapsulation: Unspecified
Input packets : 33920495
Output packets: 33920495
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Physical interface: lsi, Enabled, Physical link is Up
Interface index: 4, SNMP ifIndex: 4
Type: Software-Pseudo, Link-level type: LSI, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: mtun, Enabled, Physical link is Up
Interface index: 66, SNMP ifIndex: 12
Type: Multicast-GRE, Link-level type: GRE, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Input packets : 0
Output packets: 0
Physical interface: pimd, Enabled, Physical link is Up
Interface index: 26, SNMP ifIndex: 11
Type: PIMD, Link-level type: PIM-Decapsulator, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Input packets : 0
Output packets: 0
Physical interface: pime, Enabled, Physical link is Up
Interface index: 25, SNMP ifIndex: 10
Type: PIME, Link-level type: PIM-Encapsulator, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Input packets : 0
Output packets: 0
Physical interface: pip0, Enabled, Physical link is Up
Interface index: 130, SNMP ifIndex: 515
Type: Ethernet, Link-level type: Ethernet, MTU: 9192
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Current address: 2c:6b:f5:ff:08:c8, Hardware address: 2c:6b:f5:ff:08:c8
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: pp0, Enabled, Physical link is Up
Interface index: 131, SNMP ifIndex: 516
Type: PPPoE, Link-level type: PPPoE, MTU: 1532
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical interface: rbeb, Enabled, Physical link is Up
Interface index: 135, SNMP ifIndex: 517
Type: Software-Pseudo, Link-level type: Remote-BEB, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: tap, Enabled, Physical link is Up
Interface index: 12, SNMP ifIndex: 7
Type: Software-Pseudo, Link-level type: Interface-Specific, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
Physical interface: vtep, Enabled, Physical link is Up
Interface index: 133, SNMP ifIndex: 518
Type: Software-Pseudo, Link-level type: VxLAN-Tunnel-Endpoint, MTU: Unlimited, Speed: Unlimited
Device flags : Present Running
Link type : Full-Duplex
Link flags : None
Last flapped : Never
Input packets : 0
Output packets: 0
"""
}
golden_output_2 = {
"execute.return_value": """
show interfaces extensive
Physical interface: ge-0/0/0, Enabled, Physical link is Up
Interface index: 148, SNMP ifIndex: 526, Generation: 151
Description: none/100G/in/hktGCS002_ge-0/0/0
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 2000 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:56:b6, Hardware address: 00:50:56:ff:56:b6
Last flapped : 2019-08-29 09:09:19 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 19732539397 3152 bps
Output bytes : 16367814635 3160 bps
Input packets: 133726363 5 pps
Output packets: 129306863 4 pps
IPv6 transit statistics:
Input bytes : 737203554
Output bytes : 1018758352
Input packets: 7541948
Output packets: 6986863
Label-switched interface (LSI) traffic statistics:
Input bytes : 0 0 bps
Input packets: 0 0 pps
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 1, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 21604601324 16828244544
Total packets 133726919 129183374
Unicast packets 133726908 129183361
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 133726908
Input packet rejects 118
Input DA rejects 60
Input SA rejects 0
Output packet count 129183361
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Logical interface ge-0/0/0.0 (Index 333) (SNMP ifIndex 606) (Generation 142)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Traffic statistics:
Input bytes : 19732539397
Output bytes : 15997705213
Input packets: 133726363
Output packets: 129306864
IPv6 transit statistics:
Input bytes : 737203554
Output bytes : 1018758352
Input packets: 7541948
Output packets: 6986863
Local statistics:
Input bytes : 12676733166
Output bytes : 11303933633
Input packets: 63558712
Output packets: 61684919
Transit statistics:
Input bytes : 7055806231 3152 bps
Output bytes : 4693771580 816 bps
Input packets: 70167651 5 pps
Output packets: 67621945 1 pps
IPv6 transit statistics:
Input bytes : 737203554 1856 bps
Output bytes : 1018758352 0 bps
Input packets: 7541948 2 pps
Output packets: 6986863 0 pps
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 160, Route table: 0
Flags: No-Redirects, Sendbcast-pkt-to-re
Input Filters: catch_all
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.189.5.92/30, Local: 10.189.5.93, Broadcast: 10.189.5.95, Generation: 146
Protocol inet6, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 161, Route table: 0
Flags: Is-Primary
Input Filters: v6_catch_all
Addresses, Flags: Is-Preferred Is-Primary
Destination: 2001:db8:223c:2c16::/64, Local: 2001:db8:223c:2c16::1
Generation: 148
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::250:56ff:feff:56b6
Protocol mpls, MTU: 1488, Maximum labels: 3, Generation: 150
Generation: 162, Route table: 0
Flags: Is-Primary
Protocol multiservice, MTU: Unlimited, Generation: 163, Route table: 0
Flags: Is-Primary
Policer: Input: __default_arp_policer__
Physical interface: lc-0/0/0, Enabled, Physical link is Up
Interface index: 145, SNMP ifIndex: 519, Generation: 148
Type: Unspecified, Link-level type: Unspecified, MTU: 0, Clocking: Unspecified, Speed: 800mbps
Device flags : Present Running
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface lc-0/0/0.32769 (Index 329) (SNMP ifIndex 520) (Generation 138)
Flags: Up Encapsulation: ENET2
Bandwidth: 0
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol vpls, MTU: Unlimited, Generation: 155, Route table: 1
Flags: Is-Primary
Physical interface: pfe-0/0/0, Enabled, Physical link is Up
Interface index: 147, SNMP ifIndex: 522, Generation: 150
Type: Unspecified, Link-level type: Unspecified, MTU: 0, Clocking: Unspecified, Speed: 800mbps
Device flags : Present Running
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface pfe-0/0/0.16383 (Index 330) (SNMP ifIndex 523) (Generation 139)
Flags: Up SNMP-Traps Encapsulation: ENET2
Bandwidth: 0
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 156, Route table: 1
Flags: None
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 157, Route table: 1
Flags: None
Physical interface: pfh-0/0/0, Enabled, Physical link is Up
Interface index: 146, SNMP ifIndex: 521, Generation: 149
Type: Unspecified, Link-level type: Unspecified, MTU: 0, Clocking: Unspecified, Speed: 800mbps
Device flags : Present Running
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface pfh-0/0/0.16383 (Index 331) (SNMP ifIndex 524) (Generation 140)
Flags: Up SNMP-Traps Encapsulation: ENET2
Bandwidth: 0
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 158, Route table: 1
Flags: None
Logical interface pfh-0/0/0.16384 (Index 332) (SNMP ifIndex 525) (Generation 141)
Flags: Up SNMP-Traps Encapsulation: ENET2
Bandwidth: 0
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 159, Route table: 2
Flags: Is-Primary
Physical interface: ge-0/0/1, Enabled, Physical link is Up
Interface index: 149, SNMP ifIndex: 527, Generation: 152
Description: YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 2000 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:37:f9, Hardware address: 00:50:56:ff:37:f9
Last flapped : 2019-08-29 09:09:19 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 34950288700 5304 bps
Output bytes : 42783271407 8016 bps
Input packets: 376916510 9 pps
Output packets: 370594612 9 pps
IPv6 transit statistics:
Input bytes : 3303092203
Output bytes : 3127179954
Input packets: 41039648
Output packets: 41594426
Label-switched interface (LSI) traffic statistics:
Input bytes : 0 0 bps
Input packets: 0 0 pps
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 1, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 40247994921 45995779695
Total packets 376916517 370414748
Unicast packets 376916499 370414722
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 376916499
Input packet rejects 41
Input DA rejects 4
Input SA rejects 0
Output packet count 370414722
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Logical interface ge-0/0/1.0 (Index 334) (SNMP ifIndex 605) (Generation 143)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Traffic statistics:
Input bytes : 34950288700
Output bytes : 42238503795
Input packets: 376916510
Output packets: 370594612
IPv6 transit statistics:
Input bytes : 3303092203
Output bytes : 3127179954
Input packets: 41039648
Output packets: 41594426
Local statistics:
Input bytes : 13617655381
Output bytes : 18694395654
Input packets: 85070342
Output packets: 90794602
Transit statistics:
Input bytes : 21332633319 3368 bps
Output bytes : 23544108141 2144 bps
Input packets: 291846168 6 pps
Output packets: 279800010 4 pps
IPv6 transit statistics:
Input bytes : 3303092203 3360 bps
Output bytes : 3127179954 1136 bps
Input packets: 41039648 5 pps
Output packets: 41594426 1 pps
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 164, Route table: 0
Flags: No-Redirects, Sendbcast-pkt-to-re
Input Filters: catch_all
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.169.14.120/30, Local: 10.169.14.122, Broadcast: 10.169.14.123, Generation: 152
Protocol inet6, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 2, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 165, Route table: 0
Input Filters: v6_catch_all
Addresses, Flags: Is-Preferred Is-Primary
Destination: 2001:db8:eb18:6337::/64, Local: 2001:db8:eb18:6337::2
Generation: 154
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::250:56ff:feff:37f9
Protocol mpls, MTU: 1488, Maximum labels: 3, Generation: 156
Protocol multiservice, MTU: Unlimited, Generation: 166, Route table: 0
Generation: 167, Route table: 0
Policer: Input: __default_arp_policer__
Physical interface: ge-0/0/2, Enabled, Physical link is Up
Interface index: 150, SNMP ifIndex: 528, Generation: 153
Description: ve-hkgasr01_Gi2[DefaultCost1000]
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 2000 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:1e:ba, Hardware address: 00:50:56:ff:1e:ba
Last flapped : 2020-03-05 16:04:34 UTC (2w6d 15:23 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 34302334175 880 bps
Output bytes : 27932035013 880 bps
Input packets: 248114960 1 pps
Output packets: 229304654 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Label-switched interface (LSI) traffic statistics:
Input bytes : 0 0 bps
Input packets: 0 0 pps
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 47, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 38187795706 30274309615
Total packets 252983787 229070544
Unicast packets 252983783 229070540
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 252983783
Input packet rejects 335972
Input DA rejects 0
Input SA rejects 0
Output packet count 229070540
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Logical interface ge-0/0/2.0 (Index 336) (SNMP ifIndex 536) (Generation 148)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Traffic statistics:
Input bytes : 31231373218
Output bytes : 27263935504
Input packets: 210377499
Output packets: 222609631
Local statistics:
Input bytes : 11458939228
Output bytes : 13615419042
Input packets: 31742480
Output packets: 28915016
Transit statistics:
Input bytes : 19772433990 880 bps
Output bytes : 13648516462 360 bps
Input packets: 178635019 1 pps
Output packets: 193694615 0 pps
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 179, Route table: 0
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.19.198.24/30, Local: 10.19.198.25, Broadcast: 10.19.198.27, Generation: 166
Protocol mpls, MTU: 1488, Maximum labels: 3, Generation: 180, Route table: 0
Protocol multiservice, MTU: Unlimited, Generation: 181, Route table: 0
Policer: Input: __default_arp_policer__
Physical interface: ge-0/0/3, Enabled, Physical link is Up
Interface index: 151, SNMP ifIndex: 529, Generation: 154
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:93:cb, Hardware address: 00:50:56:ff:93:cb
Last flapped : 2019-10-25 08:50:18 UTC (21w5d 22:38 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 1092968 0 bps
Output bytes : 3419965 0 bps
Input packets: 14619 0 pps
Output packets: 17426 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 3, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : None
Active defects : None
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 1157295 3441533
Total packets 14683 17425
Unicast packets 14683 17425
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 14683
Input packet rejects 65
Input DA rejects 0
Input SA rejects 0
Output packet count 17425
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Logical interface ge-0/0/3.0 (Index 335) (SNMP ifIndex 537) (Generation 146)
Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
Traffic statistics:
Input bytes : 1092968
Output bytes : 3353155
Input packets: 14619
Output packets: 17426
Local statistics:
Input bytes : 667980
Output bytes : 467670
Input packets: 11133
Output packets: 11135
Transit statistics:
Input bytes : 424988 0 bps
Output bytes : 2885485 0 bps
Input packets: 3486 0 pps
Output packets: 6291 0 pps
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 174, Route table: 0
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 100.0.0/24, Local: 10.55.0.254, Broadcast: 10.55.0.255, Generation: 162
Protocol multiservice, MTU: Unlimited, Generation: 175, Route table: 0
Policer: Input: __default_arp_policer__
Physical interface: ge-0/0/4, Enabled, Physical link is Down
Interface index: 152, SNMP ifIndex: 530, Generation: 155
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:3e:28, Hardware address: 00:50:56:ff:3e:28
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 2, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 0 0
Total packets 0 0
Unicast packets 0 0
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 0
Input packet rejects 0
Input DA rejects 0
Input SA rejects 0
Output packet count 0
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Physical interface: ge-0/0/5, Enabled, Physical link is Down
Interface index: 153, SNMP ifIndex: 531, Generation: 156
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:01:1d, Hardware address: 2c:6b:f5:ff:01:1d
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 2, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 0 0
Total packets 0 0
Unicast packets 0 0
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 0
Input packet rejects 0
Input DA rejects 0
Input SA rejects 0
Output packet count 0
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Physical interface: ge-0/0/6, Enabled, Physical link is Down
Interface index: 154, SNMP ifIndex: 532, Generation: 157
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:01:1e, Hardware address: 2c:6b:f5:ff:01:1e
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 2, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 0 0
Total packets 0 0
Unicast packets 0 0
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 0
Input packet rejects 0
Input DA rejects 0
Input SA rejects 0
Output packet count 0
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Physical interface: ge-0/0/7, Enabled, Physical link is Down
Interface index: 155, SNMP ifIndex: 533, Generation: 158
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:01:1f, Hardware address: 2c:6b:f5:ff:01:1f
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 2, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 0 0
Total packets 0 0
Unicast packets 0 0
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 0
Input packet rejects 0
Input DA rejects 0
Input SA rejects 0
Output packet count 0
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Physical interface: ge-0/0/8, Enabled, Physical link is Down
Interface index: 156, SNMP ifIndex: 534, Generation: 159
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:01:20, Hardware address: 2c:6b:f5:ff:01:20
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 2, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 0 0
Total packets 0 0
Unicast packets 0 0
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 0
Input packet rejects 0
Input DA rejects 0
Input SA rejects 0
Output packet count 0
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Physical interface: ge-0/0/9, Enabled, Physical link is Down
Interface index: 157, SNMP ifIndex: 535, Generation: 160
Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
Pad to minimum frame size: Disabled
Device flags : Present Running Down
Interface flags: Hardware-Down SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:01:21, Hardware address: 2c:6b:f5:ff:01:21
Last flapped : 2019-08-29 09:09:20 UTC (29w6d 22:19 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Dropped traffic statistics due to STP State:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
Output errors:
Carrier transitions: 2, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
MTU errors: 0, Resource errors: 0
Active alarms : LINK
Active defects : LINK
PCS statistics Seconds
Bit errors 0
Errored blocks 0
Ethernet FEC statistics Errors
FEC Corrected Errors 0
FEC Uncorrected Errors 0
FEC Corrected Errors Rate 0
FEC Uncorrected Errors Rate 0
MAC statistics: Receive Transmit
Total octets 0 0
Total packets 0 0
Unicast packets 0 0
Broadcast packets 0 0
Multicast packets 0 0
CRC/Align errors 0 0
FIFO errors 0 0
MAC control frames 0 0
MAC pause frames 0 0
Oversized frames 0
Jabber frames 0
Fragment frames 0
VLAN tagged frames 0
Code violations 0
Total errors 0 0
Filter statistics:
Input packet count 0
Input packet rejects 0
Input DA rejects 0
Input SA rejects 0
Output packet count 0
Output packet pad count 0
Output packet error count 0
CAM destination filters: 0, CAM source filters: 0
Autonegotiation information:
Negotiation status: Incomplete
Packet Forwarding Engine configuration:
Destination slot: 0 (0x00)
CoS information:
Direction : Output
CoS transmit queue Bandwidth Buffer Priority Limit
% bps % usec
0 best-effort 95 950000000 95 0 low none
3 network-control 5 50000000 5 0 low none
Interface transmit statistics: Disabled
Physical interface: .local., Enabled, Physical link is Up
Interface index: 0, SNMP ifIndex: 0, Generation: 1
Type: Loopback, Link-level type: Interface-Specific, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running Loopback
Interface flags: Point-To-Point
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface .local..0 (Index 0) (SNMP ifIndex 0) (Generation 1)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 133, Route table: 0
Flags: None
Addresses, Flags: Is-Primary
Destination: Unspecified, Local: 10.1.0.101, Broadcast: Unspecified, Generation: 133
Addresses, Flags: None
Destination: Unspecified, Local: 10.19.198.25, Broadcast: Unspecified, Generation: 165
Addresses, Flags: None
Destination: Unspecified, Local: 10.55.0.254, Broadcast: Unspecified, Generation: 161
Addresses, Flags: None
Destination: Unspecified, Local: 10.169.14.122, Broadcast: Unspecified, Generation: 151
Addresses, Flags: None
Destination: Unspecified, Local: 10.189.5.93, Broadcast: Unspecified, Generation: 145
Addresses, Flags: None
Destination: Unspecified, Local: 10.189.5.252, Broadcast: Unspecified, Generation: 134
Protocol iso, MTU: Unlimited, Generation: 132, Route table: 0
Flags: None
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 130, Route table: 0
Flags: None
Addresses, Flags: None
Destination: Unspecified, Local: 2001:db8:eb18:6337::2
Generation: 153
Addresses, Flags: None
Destination: Unspecified, Local: 2001:db8:223c:ca45::b
Generation: 136
Addresses, Flags: None
Destination: Unspecified, Local: 2001:db8:223c:2c16::1
Generation: 147
Addresses, Flags: None
Destination: Unspecified, Local: fe80::250:560f:fc8d:7c08
Generation: 138
Addresses, Flags: None
Destination: Unspecified, Local: fe80::250:56ff:feff:37f9
Generation: 155
Addresses, Flags: None
Destination: Unspecified, Local: fe80::250:56ff:feff:56b6
Protocol mpls, MTU: Unlimited, Maximum labels: 3, Generation: 149
Generation: 137, Route table: 0
Flags: None
Protocol 85, MTU: Unlimited, Generation: 129, Route table: 0
Flags: None
Logical interface .local..1 (Index 1) (SNMP ifIndex 0) (Generation 2)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 134, Route table: 1
Flags: None
Addresses, Flags: Is-Primary
Destination: Unspecified, Local: 10.0.0.4, Broadcast: Unspecified, Generation: 130
Addresses, Flags: None
Destination: Unspecified, Local: 172.16.64.1, Broadcast: Unspecified, Generation: 142
Addresses, Flags: None
Destination: Unspecified, Local: 172.16.64.4, Broadcast: Unspecified, Generation: 129
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 131, Route table: 1
Flags: None
Addresses, Flags: None
Destination: Unspecified, Local: fe80::250:56ff:feff:e2c1
Generation: 131
Addresses, Flags: None
Destination: Unspecified, Local: 2001:db8:8d82::a:0:0:4
Protocol vpls, MTU: Unlimited, Generation: 132
Generation: 138, Route table: 1
Flags: None
Logical interface .local..2 (Index 2) (SNMP ifIndex 0) (Generation 2)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 135, Route table: 2
Flags: None
Addresses, Flags: Is-Primary
Destination: Unspecified, Local: 127.0.0.1, Broadcast: Unspecified, Generation: 140
Logical interface .local..3 (Index 323) (SNMP ifIndex 0) (Generation 132)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 147, Route table: 3
Flags: None
Logical interface .local..4 (Index 324) (SNMP ifIndex 0) (Generation 133)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 148, Route table: 4
Flags: None
Addresses, Flags: Is-Primary
Destination: Unspecified, Local: 172.16.64.127, Broadcast: Unspecified, Generation: 143
Logical interface .local..5 (Index 326) (SNMP ifIndex 0) (Generation 135)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 150, Route table: 5
Flags: None
Protocol iso, MTU: Unlimited, Generation: 151, Route table: 5
Flags: None
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 153, Route table: 5
Flags: None
Logical interface .local..6 (Index 327) (SNMP ifIndex 0) (Generation 136)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol mpls, MTU: Unlimited, Maximum labels: 3, Generation: 152, Route table: 6
Flags: None
Logical interface .local..7 (Index 328) (SNMP ifIndex 0) (Generation 137)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol vpls, MTU: Unlimited, Generation: 154, Route table: 7
Flags: None
Logical interface .local..36735 (Index 262016) (SNMP ifIndex 0) (Generation 2)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Logical interface .local..36736 (Index 262017) (SNMP ifIndex 0) (Generation 2)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 136, Route table: 36736
Flags: None
Logical interface .local..36737 (Index 262018) (SNMP ifIndex 0) (Generation 2)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Logical interface .local..36738 (Index 262019) (SNMP ifIndex 0) (Generation 1)
Flags: Point-To-Point Encapsulation: Unspecified
Bandwidth: 0
Physical interface: cbp0, Enabled, Physical link is Up
Interface index: 129, SNMP ifIndex: 501, Generation: 132
Type: Ethernet, Link-level type: Ethernet, MTU: 9192, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:01:29, Hardware address: 2c:6b:f5:ff:01:29
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: demux0, Enabled, Physical link is Up
Interface index: 128, SNMP ifIndex: 502, Generation: 131
Type: Software-Pseudo, Link-level type: Unspecified, MTU: 9192, Clocking: 1, Speed: Unspecified
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: dsc, Enabled, Physical link is Up
Interface index: 5, SNMP ifIndex: 5, Generation: 6
Type: Software-Pseudo, Link-level type: Unspecified, MTU: Unlimited, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: em1, Enabled, Physical link is Up
Interface index: 65, SNMP ifIndex: 23, Generation: 2
Type: Ethernet, Link-level type: Ethernet, MTU: 1514, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Unspecified
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:e2:c1, Hardware address: 00:50:56:ff:e2:c1
Alternate link address: Unspecified
Last flapped : 2019-08-29 09:03:11 UTC (29w6d 22:25 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface em1.0 (Index 3) (SNMP ifIndex 24) (Generation 2)
Flags: Up SNMP-Traps 0x4000000 Encapsulation: ENET2
Traffic statistics:
Input bytes : 102691292552
Output bytes : 106913726719
Input packets: 725074463
Output packets: 794456958
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 102691292552
Output bytes : 106913726719
Input packets: 725074463
Output packets: 794456958
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 139, Route table: 1
Flags: Is-Primary
Addresses, Flags: Is-Preferred
Destination: 10/8, Local: 10.0.0.4, Broadcast: 10.255.255.255, Generation: 2
Addresses, Flags: Preferred Kernel Is-Preferred
Destination: 128/2, Local: 172.16.64.1, Broadcast: 172.16.16.255, Generation: 7
Addresses, Flags: Primary Is-Default Is-Primary
Destination: 128/2, Local: 172.16.64.4, Broadcast: 172.16.16.255, Generation: 1
Protocol inet6, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 140, Route table: 1
Flags: Is-Primary
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::250:56ff:feff:e2c1
Generation: 3
Addresses, Flags: Is-Default Is-Preferred Is-Primary
Destination: 2001:db8:8d82::/64, Local: 2001:db8:8d82::a:0:0:4
Protocol tnp, MTU: 1500, Generation: 4
Generation: 141, Route table: 1
Flags: Primary, Is-Primary
Addresses, Flags: None
Destination: Unspecified, Local: 0x4, Broadcast: Unspecified, Generation: 5
Physical interface: esi, Enabled, Physical link is Up
Interface index: 134, SNMP ifIndex: 503, Generation: 137
Type: Software-Pseudo, Link-level type: VxLAN-Tunnel-Endpoint, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti0, Enabled, Physical link is Up
Interface index: 136, SNMP ifIndex: 504, Generation: 139
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti1, Enabled, Physical link is Up
Interface index: 137, SNMP ifIndex: 505, Generation: 140
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti2, Enabled, Physical link is Up
Interface index: 138, SNMP ifIndex: 506, Generation: 141
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti3, Enabled, Physical link is Up
Interface index: 139, SNMP ifIndex: 507, Generation: 142
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti4, Enabled, Physical link is Up
Interface index: 140, SNMP ifIndex: 508, Generation: 143
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti5, Enabled, Physical link is Up
Interface index: 141, SNMP ifIndex: 509, Generation: 144
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti6, Enabled, Physical link is Up
Interface index: 142, SNMP ifIndex: 510, Generation: 145
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fti7, Enabled, Physical link is Up
Interface index: 143, SNMP ifIndex: 511, Generation: 146
Type: FTI, Link-level type: Flexible-tunnel-Interface, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: fxp0, Enabled, Physical link is Up
Interface index: 64, SNMP ifIndex: 1, Generation: 1
Type: Ethernet, Link-level type: Ethernet, MTU: 1514, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Unspecified
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 00:50:56:ff:0a:95, Hardware address: 00:50:56:ff:0a:95
Alternate link address: Unspecified
Last flapped : 2019-08-29 09:03:11 UTC (29w6d 22:25 ago)
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface fxp0.0 (Index 4) (SNMP ifIndex 13) (Generation 3)
Flags: Up SNMP-Traps 0x4000000 Encapsulation: ENET2
Traffic statistics:
Input bytes : 46289683
Output bytes : 207724636
Input packets: 620829
Output packets: 896062
Local statistics:
Input bytes : 46289683
Output bytes : 207724636
Input packets: 620829
Output packets: 896062
Protocol inet, MTU: 1500
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 2, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 142, Route table: 0
Flags: Sendbcast-pkt-to-re, Is-Primary
Addresses, Flags: Is-Preferred Is-Primary
Destination: 1.0.0/24, Local: 10.1.0.101, Broadcast: 10.1.0.255, Generation: 6
Physical interface: gre, Enabled, Physical link is Up
Interface index: 10, SNMP ifIndex: 8, Generation: 11
Type: GRE, Link-level type: GRE, MTU: Unlimited, Speed: Unlimited
Hold-times : Up 0 ms, Down 0 ms
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Physical interface: ipip, Enabled, Physical link is Up
Interface index: 11, SNMP ifIndex: 9, Generation: 12
Type: IPIP, Link-level type: IP-over-IP, MTU: Unlimited, Speed: Unlimited
Hold-times : Up 0 ms, Down 0 ms
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Physical interface: irb, Enabled, Physical link is Up
Interface index: 132, SNMP ifIndex: 512, Generation: 135
Type: Ethernet, Link-level type: Ethernet, MTU: 1514, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:08:09, Hardware address: 2c:6b:f5:ff:08:09
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: jsrv, Enabled, Physical link is Up
Interface index: 144, SNMP ifIndex: 513, Generation: 147
Type: Ethernet, Link-level type: Ethernet, MTU: 1514, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:08:d8, Hardware address: 2c:6b:f5:ff:08:d8
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface jsrv.1 (Index 325) (SNMP ifIndex 514) (Generation 134)
Flags: Up 0x24004000 Encapsulation: unknown
Bandwidth: 1Gbps
Routing Instance: None Bridging Domain: None
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: 1514
Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 149, Route table: 4
Flags: Is-Primary
Addresses, Flags: Primary Is-Default Is-Preferred Is-Primary
Destination: 128/2, Local: 172.16.64.127, Broadcast: 172.16.16.255, Generation: 144
Physical interface: lo0, Enabled, Physical link is Up
Interface index: 6, SNMP ifIndex: 6, Generation: 7
Type: Loopback, Link-level type: Unspecified, MTU: Unlimited, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running Loopback
Interface flags: SNMP-Traps
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 38208810127
Output bytes : 38208810127
Input packets: 33943400
Output packets: 33943400
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Logical interface lo0.0 (Index 320) (SNMP ifIndex 16) (Generation 129)
Flags: SNMP-Traps Encapsulation: Unspecified
Traffic statistics:
Input bytes : 12188
Output bytes : 12188
Input packets: 83
Output packets: 83
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 12188
Output bytes : 12188
Input packets: 83
Output packets: 83
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
IPv6 transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 143, Route table: 0
Flags: No-Redirects, Sendbcast-pkt-to-re
Input Filters: local-access-control
Addresses, Flags: Is-Default Is-Primary
Destination: Unspecified, Local: 10.189.5.252, Broadcast: Unspecified, Generation: 135
Protocol inet6, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 144, Route table: 0
Input Filters: v6_local-access-control
Addresses, Flags: Is-Default Is-Primary
Destination: Unspecified, Local: 2001:db8:223c:ca45::b
Generation: 137
Destination: Unspecified, Local: fe80::250:560f:fc8d:7c08
Generation: 139
Logical interface lo0.16384 (Index 322) (SNMP ifIndex 21) (Generation 131)
Flags: SNMP-Traps Encapsulation: Unspecified
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Local statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 146, Route table: 2
Addresses, Flags: None
Destination: Unspecified, Local: 127.0.0.1, Broadcast: Unspecified, Generation: 141
Logical interface lo0.16385 (Index 321) (SNMP ifIndex 22) (Generation 130)
Flags: SNMP-Traps Encapsulation: Unspecified
Traffic statistics:
Input bytes : 38208797939
Output bytes : 38208797939
Input packets: 33943317
Output packets: 33943317
Local statistics:
Input bytes : 38208797939
Output bytes : 38208797939
Input packets: 33943317
Output packets: 33943317
Transit statistics:
Input bytes : 0 0 bps
Output bytes : 0 0 bps
Input packets: 0 0 pps
Output packets: 0 0 pps
Protocol inet, MTU: Unlimited
Max nh cache: 0, New hold nh limit: 0, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
Generation: 145, Route table: 1
Physical interface: lsi, Enabled, Physical link is Up
Interface index: 4, SNMP ifIndex: 4, Generation: 5
Type: Software-Pseudo, Link-level type: LSI, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: mtun, Enabled, Physical link is Up
Interface index: 66, SNMP ifIndex: 12, Generation: 3
Type: Multicast-GRE, Link-level type: GRE, MTU: Unlimited, Speed: Unlimited
Hold-times : Up 0 ms, Down 0 ms
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Physical interface: pimd, Enabled, Physical link is Up
Interface index: 26, SNMP ifIndex: 11, Generation: 129
Type: PIMD, Link-level type: PIM-Decapsulator, MTU: Unlimited, Speed: Unlimited
Hold-times : Up 0 ms, Down 0 ms
Device flags : Present Running
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Physical interface: pime, Enabled, Physical link is Up
Interface index: 25, SNMP ifIndex: 10, Generation: 130
Type: PIME, Link-level type: PIM-Encapsulator, MTU: Unlimited, Speed: Unlimited
Hold-times : Up 0 ms, Down 0 ms
Device flags : Present Running
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Physical interface: pip0, Enabled, Physical link is Up
Interface index: 130, SNMP ifIndex: 515, Generation: 133
Type: Ethernet, Link-level type: Ethernet, MTU: 9192, Clocking: Unspecified, Speed: Unspecified
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: 2c:6b:f5:ff:08:c8, Hardware address: 2c:6b:f5:ff:08:c8
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: pp0, Enabled, Physical link is Up
Interface index: 131, SNMP ifIndex: 516, Generation: 134
Type: PPPoE, Link-level type: PPPoE, MTU: 1532, Speed: Unspecified
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Physical interface: rbeb, Enabled, Physical link is Up
Interface index: 135, SNMP ifIndex: 517, Generation: 138
Type: Software-Pseudo, Link-level type: Remote-BEB, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: tap, Enabled, Physical link is Up
Interface index: 12, SNMP ifIndex: 7, Generation: 13
Type: Software-Pseudo, Link-level type: Interface-Specific, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link type : Unspecified
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
Physical interface: vtep, Enabled, Physical link is Up
Interface index: 133, SNMP ifIndex: 518, Generation: 136
Type: Software-Pseudo, Link-level type: VxLAN-Tunnel-Endpoint, MTU: Unlimited, Clocking: Unspecified, Speed: Unlimited
Device flags : Present Running
Link type : Full-Duplex
Link flags : None
Physical info : Unspecified
Hold-times : Up 0 ms, Down 0 ms
Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
Current address: Unspecified, Hardware address: Unspecified
Alternate link address: Unspecified
Last flapped : Never
Statistics last cleared: Never
Traffic statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
IPv6 transit statistics:
Input bytes : 0
Output bytes : 0
Input packets: 0
Output packets: 0
Input errors:
Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Giants: 0, Policed discards: 0, Resource errors: 0
Output errors:
Carrier transitions: 0, Errors: 0, Drops: 0, MTU errors: 0, Resource errors: 0
"""
}
golden_parsed_output_2 = {
"interface-information": {
"physical-interface": [
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:56:b6",
"description": "none/100G/in/hktGCS002_ge-0/0/0",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "21604601324",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "133726919",
"input-unicasts": "133726908",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "16828244544",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "129183374",
"output-unicasts": "129183361"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:56:b6",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:19 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "148",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-no-redirects": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "2001:db8:223c:2c16::/64",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "2001:db8:223c:2c16::1"
},
{
"ifa-destination": "fe80::/64",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "fe80::250:56ff:feff:56b6"
}
],
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "mpls",
"generation": "150",
"maximum-labels": "3",
"mtu": "1488"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "multiservice",
"generation": "163",
"mtu": "Unlimited",
"route-table": "0"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "333",
"name": "ge-0/0/0.0",
"snmp-index": "606",
"traffic-statistics": {
"input-bytes": "19732539397",
"input-packets": "133726363",
"ipv6-transit-statistics": {
"input-bytes": "12676733166",
"input-packets": "63558712",
"output-bytes": "11303933633",
"output-packets": "61684919"
},
"output-bytes": "15997705213",
"output-packets": "129306864"
},
"transit-traffic-statistics": {
"input-bps": "3152",
"input-bytes": "7055806231",
"input-packets": "70167651",
"input-pps": "5",
"ipv6-transit-statistics": {
"input-bps": "1856",
"input-bytes": "737203554",
"input-packets": "7541948",
"input-pps": "2",
"output-bps": "0",
"output-bytes": "1018758352",
"output-packets": "6986863",
"output-pps": "0"
},
"output-bps": "816",
"output-bytes": "4693771580",
"output-packets": "67621945",
"output-pps": "1"
}
}
],
"loopback": "Disabled",
"lsi-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0"
},
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/0",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "1",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "526",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "3152",
"input-bytes": "19732539397",
"input-packets": "133726363",
"input-pps": "5",
"ipv6-transit-statistics": {
"input-bytes": "737203554",
"input-packets": "7541948",
"output-bytes": "1018758352",
"output-packets": "6986863"
},
"output-bps": "3160",
"output-bytes": "16367814635",
"output-packets": "129306863",
"output-pps": "4"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "145",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "vpls",
"generation": "155",
"mtu": "Unlimited",
"route-table": "1"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "329",
"logical-interface-bandwidth": "0",
"name": "lc-0/0/0.32769",
"snmp-index": "520",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
}
],
"name": "lc-0/0/0",
"output-error-list": {},
"snmp-index": "519",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "147",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet6",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "330",
"logical-interface-bandwidth": "0",
"name": "pfe-0/0/0.16383",
"snmp-index": "523",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
}
],
"name": "pfe-0/0/0",
"output-error-list": {},
"snmp-index": "522",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "146",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "331",
"logical-interface-bandwidth": "0",
"name": "pfh-0/0/0.16383",
"snmp-index": "524",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "332",
"logical-interface-bandwidth": "0",
"name": "pfh-0/0/0.16384",
"snmp-index": "525",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
}
],
"name": "pfh-0/0/0",
"output-error-list": {},
"snmp-index": "521",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:37:f9",
"description": "YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "40247994921",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "376916517",
"input-unicasts": "376916499",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "45995779695",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "370414748",
"output-unicasts": "370414722"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:37:f9",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:19 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "149",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-no-redirects": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "2001:db8:eb18:6337::/64",
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "2001:db8:eb18:6337::2"
},
{
"ifa-destination": "fe80::/64",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "fe80::250:56ff:feff:37f9"
}
],
"intf-curr-cnt": "2",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "mpls",
"generation": "156",
"maximum-labels": "3",
"mtu": "1488"
},
{
"address-family-name": "multiservice",
"generation": "166",
"mtu": "Unlimited",
"route-table": "0"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "334",
"name": "ge-0/0/1.0",
"snmp-index": "605",
"traffic-statistics": {
"input-bytes": "34950288700",
"input-packets": "376916510",
"ipv6-transit-statistics": {
"input-bytes": "13617655381",
"input-packets": "85070342",
"output-bytes": "18694395654",
"output-packets": "90794602"
},
"output-bytes": "42238503795",
"output-packets": "370594612"
},
"transit-traffic-statistics": {
"input-bps": "3368",
"input-bytes": "21332633319",
"input-packets": "291846168",
"input-pps": "6",
"ipv6-transit-statistics": {
"input-bps": "3360",
"input-bytes": "3303092203",
"input-packets": "41039648",
"input-pps": "5",
"output-bps": "1136",
"output-bytes": "3127179954",
"output-packets": "41594426",
"output-pps": "1"
},
"output-bps": "2144",
"output-bytes": "23544108141",
"output-packets": "279800010",
"output-pps": "4"
}
}
],
"loopback": "Disabled",
"lsi-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0"
},
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/1",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "1",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "527",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "5304",
"input-bytes": "34950288700",
"input-packets": "376916510",
"input-pps": "9",
"ipv6-transit-statistics": {
"input-bytes": "3303092203",
"input-packets": "41039648",
"output-bytes": "3127179954",
"output-packets": "41594426"
},
"output-bps": "8016",
"output-bytes": "42783271407",
"output-packets": "370594612",
"output-pps": "9"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:1e:ba",
"description": "ve-hkgasr01_Gi2[DefaultCost1000]",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "38187795706",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "252983787",
"input-unicasts": "252983783",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "30274309615",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "229070544",
"output-unicasts": "229070540"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:1e:ba",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2020-03-05 16:04:34 UTC (2w6d 15:23 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "150",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "mpls",
"generation": "180",
"maximum-labels": "3",
"mtu": "1488",
"route-table": "0"
},
{
"address-family-name": "multiservice",
"generation": "181",
"mtu": "Unlimited",
"route-table": "0"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "336",
"name": "ge-0/0/2.0",
"snmp-index": "536",
"traffic-statistics": {
"input-bytes": "11458939228",
"input-packets": "31742480",
"output-bytes": "13615419042",
"output-packets": "28915016"
},
"transit-traffic-statistics": {
"input-bps": "880",
"input-bytes": "19772433990",
"input-packets": "178635019",
"input-pps": "1",
"output-bps": "360",
"output-bytes": "13648516462",
"output-packets": "193694615",
"output-pps": "0"
}
}
],
"loopback": "Disabled",
"lsi-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0"
},
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/2",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "47",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "528",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "880",
"input-bytes": "34302334175",
"input-packets": "248114960",
"input-pps": "1",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "880",
"output-bytes": "27932035013",
"output-packets": "229304654",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:93:cb",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "1157295",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "14683",
"input-unicasts": "14683",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "3441533",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "17425",
"output-unicasts": "17425"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:93:cb",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-10-25 08:50:18 UTC (21w5d 22:38 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "151",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-name": "multiservice",
"generation": "175",
"mtu": "Unlimited",
"route-table": "0"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "335",
"name": "ge-0/0/3.0",
"snmp-index": "537",
"traffic-statistics": {
"input-bytes": "667980",
"input-packets": "11133",
"output-bytes": "467670",
"output-packets": "11135"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "424988",
"input-packets": "3486",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "2885485",
"output-packets": "6291",
"output-pps": "0"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/3",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "3",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "529",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "1092968",
"input-packets": "14619",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "3419965",
"output-packets": "17426",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:3e:28",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "0",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "0",
"input-unicasts": "0",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "0",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "0",
"output-unicasts": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:3e:28",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "152",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/4",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "2",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "530",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1d",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "0",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "0",
"input-unicasts": "0",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "0",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "0",
"output-unicasts": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1d",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "153",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/5",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "2",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "531",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1e",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "0",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "0",
"input-unicasts": "0",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "0",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "0",
"output-unicasts": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1e",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "154",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/6",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "2",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "532",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1f",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "0",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "0",
"input-unicasts": "0",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "0",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "0",
"output-unicasts": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1f",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "155",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/7",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "2",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "533",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:20",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "0",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "0",
"input-unicasts": "0",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "0",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "0",
"output-unicasts": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:20",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "156",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/8",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "2",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "534",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:21",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-mac-statistics": {
"input-broadcasts": "0",
"input-bytes": "0",
"input-code-violations": "0",
"input-crc-errors": "0",
"input-fifo-errors": "0",
"input-fragment-frames": "0",
"input-jabber-frames": "0",
"input-mac-control-frames": "0",
"input-mac-pause-frames": "0",
"input-multicasts": "0",
"input-oversized-frames": "0",
"input-packets": "0",
"input-unicasts": "0",
"input-vlan-tagged-frames": "0",
"output-broadcasts": "0",
"output-bytes": "0",
"output-crc-errors": "0",
"output-fifo-errors": "0",
"output-mac-control-frames": "0",
"output-mac-pause-frames": "0",
"output-multicasts": "0",
"output-packets": "0",
"output-unicasts": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:21",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"input-error-list": {
"framing-errors": "0",
"input-discards": "0",
"input-drops": "0",
"input-errors": "0",
"input-fifo-errors": "0",
"input-l2-channel-errors": "0",
"input-l2-mismatch-timeouts": "0",
"input-l3-incompletes": "0",
"input-resource-errors": "0",
"input-runts": "0"
},
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 22:19 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "157",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/9",
"output-error-list": {
"aged-packets": "0",
"carrier-transitions": "2",
"hs-link-crc-errors": "0",
"mtu-errors": "0",
"output-collisions": "0",
"output-drops": "0",
"output-errors": "0",
"output-fifo-errors": "0",
"output-resource-errors": "0"
},
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "535",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"stp-traffic-statistics": {
"stp-input-bytes-dropped": "0",
"stp-input-packets-dropped": "0",
"stp-output-bytes-dropped": "0",
"stp-output-packets-dropped": "0"
},
"traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-loopback": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "0",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-flags": {
"ifaf-is-primary": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
}
],
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "iso",
"generation": "132",
"mtu": "Unlimited",
"route-table": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "2001:db8:eb18:6337::2"
},
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "2001:db8:223c:ca45::b"
},
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "2001:db8:223c:2c16::1"
},
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "fe80::250:560f:fc8d:7c08"
},
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "fe80::250:56ff:feff:37f9"
},
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "fe80::250:56ff:feff:56b6"
}
],
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "mpls",
"generation": "149",
"maximum-labels": "3",
"mtu": "Unlimited"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "85",
"generation": "129",
"mtu": "Unlimited",
"route-table": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "0",
"logical-interface-bandwidth": "0",
"name": ".local..0",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-flags": {
"ifaf-is-primary": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
},
{
"ifa-flags": {
"ifaf-none": True
}
}
],
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "fe80::250:56ff:feff:e2c1"
},
{
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-none": True
},
"ifa-local": "2001:db8:8d82::a:0:0:4"
}
],
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "vpls",
"generation": "132",
"mtu": "Unlimited"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "1",
"logical-interface-bandwidth": "0",
"name": ".local..1",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "2",
"logical-interface-bandwidth": "0",
"name": ".local..2",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "323",
"logical-interface-bandwidth": "0",
"name": ".local..3",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "324",
"logical-interface-bandwidth": "0",
"name": ".local..4",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "iso",
"generation": "151",
"mtu": "Unlimited",
"route-table": "5"
},
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet6",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "326",
"logical-interface-bandwidth": "0",
"name": ".local..5",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "mpls",
"generation": "152",
"maximum-labels": "3",
"mtu": "Unlimited",
"route-table": "6"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "327",
"logical-interface-bandwidth": "0",
"name": ".local..6",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "vpls",
"generation": "154",
"mtu": "Unlimited",
"route-table": "7"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "328",
"logical-interface-bandwidth": "0",
"name": ".local..7",
"snmp-index": "0"
},
{
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "262016",
"logical-interface-bandwidth": "0",
"name": ".local..36735",
"snmp-index": "0"
},
{
"address-family": [
{
"address-family-flags": {
"ifff-none": True
},
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "262017",
"logical-interface-bandwidth": "0",
"name": ".local..36736",
"snmp-index": "0"
},
{
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "262018",
"logical-interface-bandwidth": "0",
"name": ".local..36737",
"snmp-index": "0"
},
{
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "262019",
"logical-interface-bandwidth": "0",
"name": ".local..36738",
"snmp-index": "0"
}
],
"name": ".local.",
"output-error-list": {},
"snmp-index": "0",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:01:29",
"hardware-physical-address": "2c:6b:f5:ff:01:29",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "129",
"name": "cbp0",
"output-error-list": {},
"snmp-index": "501",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "128",
"name": "demux0",
"output-error-list": {},
"snmp-index": "502",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "5",
"name": "dsc",
"output-error-list": {},
"snmp-index": "5",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "00:50:56:ff:e2:c1",
"hardware-physical-address": "00:50:56:ff:e2:c1",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "2019-08-29 09:03:11 UTC (29w6d 22:25 ago)"
},
"link-type": "Unspecified",
"local-index": "65",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-flags": {
"ifaf-is-preferred": True
}
},
{
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-kernel": True,
"ifaf-preferred": True
}
},
{
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-primary": True,
"ifaf-primary": True
}
}
],
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "fe80::/64",
"ifa-flags": {
"ifaf-is-preferred": True
},
"ifa-local": "fe80::250:56ff:feff:e2c1"
},
{
"ifa-destination": "2001:db8:8d82::/64",
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-preferred": True,
"ifaf-is-primary": True
},
"ifa-local": "2001:db8:8d82::a:0:0:4"
}
],
"intf-curr-cnt": "1",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
},
{
"address-family-flags": {
"ifff-is-primary": True,
"ifff-primary": True
},
"address-family-name": "tnp",
"generation": "4",
"interface-address": {
"ifa-flags": {
"ifaf-none": True
}
},
"mtu": "1500"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4000000"
},
"local-index": "3",
"name": "em1.0",
"snmp-index": "24",
"traffic-statistics": {
"input-bytes": "102691292552",
"input-packets": "725074463",
"ipv6-transit-statistics": {
"input-bytes": "102691292552",
"input-packets": "725074463",
"output-bytes": "106913726719",
"output-packets": "794456958"
},
"output-bytes": "106913726719",
"output-packets": "794456958"
}
}
],
"name": "em1",
"output-error-list": {},
"snmp-index": "23",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "134",
"name": "esi",
"output-error-list": {},
"snmp-index": "503",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "136",
"name": "fti0",
"output-error-list": {},
"snmp-index": "504",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "137",
"name": "fti1",
"output-error-list": {},
"snmp-index": "505",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "138",
"name": "fti2",
"output-error-list": {},
"snmp-index": "506",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "139",
"name": "fti3",
"output-error-list": {},
"snmp-index": "507",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "140",
"name": "fti4",
"output-error-list": {},
"snmp-index": "508",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "141",
"name": "fti5",
"output-error-list": {},
"snmp-index": "509",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "142",
"name": "fti6",
"output-error-list": {},
"snmp-index": "510",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "143",
"name": "fti7",
"output-error-list": {},
"snmp-index": "511",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "00:50:56:ff:0a:95",
"hardware-physical-address": "00:50:56:ff:0a:95",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "2019-08-29 09:03:11 UTC (29w6d 22:25 ago)"
},
"link-type": "Unspecified",
"local-index": "64",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-preferred": True,
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "2",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1500",
"new-hold-limit": "75000"
}
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4000000"
},
"local-index": "4",
"name": "fxp0.0",
"snmp-index": "13",
"traffic-statistics": {
"input-bytes": "46289683",
"input-packets": "620829",
"output-bytes": "207724636",
"output-packets": "896062"
}
}
],
"name": "fxp0",
"output-error-list": {},
"snmp-index": "1",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "GRE",
"local-index": "10",
"mtu": "Unlimited",
"name": "gre",
"snmp-index": "8",
"speed": "Unlimited",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "IP-over-IP",
"local-index": "11",
"mtu": "Unlimited",
"name": "ipip",
"snmp-index": "9",
"speed": "Unlimited",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:09",
"hardware-physical-address": "2c:6b:f5:ff:08:09",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "132",
"name": "irb",
"output-error-list": {},
"snmp-index": "512",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:d8",
"hardware-physical-address": "2c:6b:f5:ff:08:d8",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "144",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-preferred": True,
"ifaf-is-primary": True,
"ifaf-primary": True
}
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "75000",
"mtu": "1514",
"new-hold-limit": "75000"
}
],
"encapsulation": "unknown",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x24004000"
},
"local-index": "325",
"logical-interface-bandwidth": "1Gbps",
"name": "jsrv.1",
"snmp-index": "514",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
}
],
"name": "jsrv",
"output-error-list": {},
"snmp-index": "513",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-loopback": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "6",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-no-redirects": True,
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-primary": True
}
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
},
{
"address-family-name": "inet6",
"interface-address": {
"ifa-destination": "Unspecified",
"ifa-flags": {
"ifaf-is-default": True,
"ifaf-is-primary": True
},
"ifa-local": "fe80::250:560f:fc8d:7c08"
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "320",
"name": "lo0.0",
"snmp-index": "16",
"traffic-statistics": {
"input-bytes": "12188",
"input-packets": "83",
"ipv6-transit-statistics": {
"input-bytes": "12188",
"input-packets": "83",
"output-bytes": "12188",
"output-packets": "83"
},
"output-bytes": "12188",
"output-packets": "83"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"ipv6-transit-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
},
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"address-family": [
{
"address-family-name": "inet",
"interface-address": {
"ifa-flags": {
"ifaf-none": True
}
},
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "322",
"name": "lo0.16384",
"snmp-index": "21",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
},
{
"address-family": [
{
"address-family-name": "inet",
"intf-curr-cnt": "0",
"intf-dropcnt": "0",
"intf-unresolved-cnt": "0",
"max-local-cache": "0",
"mtu": "Unlimited",
"new-hold-limit": "0"
}
],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "321",
"name": "lo0.16385",
"snmp-index": "22",
"traffic-statistics": {
"input-bytes": "38208797939",
"input-packets": "33943317",
"output-bytes": "38208797939",
"output-packets": "33943317"
},
"transit-traffic-statistics": {
"input-bps": "0",
"input-bytes": "0",
"input-packets": "0",
"input-pps": "0",
"output-bps": "0",
"output-bytes": "0",
"output-packets": "0",
"output-pps": "0"
}
}
],
"name": "lo0",
"output-error-list": {},
"snmp-index": "6",
"traffic-statistics": {
"input-bytes": "38208810127",
"input-packets": "33943400",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "38208810127",
"output-packets": "33943400"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "4",
"name": "lsi",
"output-error-list": {},
"snmp-index": "4",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "GRE",
"local-index": "66",
"mtu": "Unlimited",
"name": "mtun",
"snmp-index": "12",
"speed": "Unlimited",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "PIM-Decapsulator",
"local-index": "26",
"mtu": "Unlimited",
"name": "pimd",
"snmp-index": "11",
"speed": "Unlimited",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "PIM-Encapsulator",
"local-index": "25",
"mtu": "Unlimited",
"name": "pime",
"snmp-index": "10",
"speed": "Unlimited",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:c8",
"hardware-physical-address": "2c:6b:f5:ff:08:c8",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "130",
"name": "pip0",
"output-error-list": {},
"snmp-index": "515",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"link-level-type": "PPPoE",
"link-type": "Full-Duplex",
"local-index": "131",
"mtu": "1532",
"name": "pp0",
"snmp-index": "516",
"speed": "Unspecified"
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "135",
"name": "rbeb",
"output-error-list": {},
"snmp-index": "517",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Unspecified",
"local-index": "12",
"name": "tap",
"output-error-list": {},
"snmp-index": "7",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "Unspecified",
"hardware-physical-address": "Unspecified",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"input-error-list": {},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "133",
"name": "vtep",
"output-error-list": {},
"snmp-index": "518",
"traffic-statistics": {
"input-bytes": "0",
"input-packets": "0",
"ipv6-transit-statistics": {
"input-bytes": "0",
"input-packets": "0",
"output-bytes": "0",
"output-packets": "0"
},
"output-bytes": "0",
"output-packets": "0"
}
}
]
}
}
def test_empty(self):
    """An empty CLI response must raise SchemaEmptyParserError."""
    self.device1 = Mock(**self.empty_output)
    parser = ShowInterfaces(device=self.device1)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_golden(self):
    """The first golden CLI capture parses into golden_parsed_output."""
    self.device = Mock(**self.golden_output)
    parser = ShowInterfaces(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output)
def test_golden_2(self):
    """The second golden CLI capture parses into golden_parsed_output_2."""
    self.device = Mock(**self.golden_output_2)
    parser = ShowInterfaces(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output_2)
# =======================================================
# Unit test for 'show interfaces queue {interface}'
# =======================================================
class TestShowInterfacesQueue(unittest.TestCase):
    """Unit tests for the ShowInterfacesQueue parser, covering the Junos
    CLI command 'show interfaces queue {interface}'.

    The device output is mocked via Mock(**{"execute.return_value": ...});
    no real device connection is made.
    """

    device = Device(name="aDevice")
    # Show the full diff on assertEqual failure; the expected dict is large.
    maxDiff = None

    # Empty CLI response: parsing it must raise SchemaEmptyParserError.
    empty_output = {"execute.return_value": ""}

    # show interfaces queue ge-0/0/2
    # Raw CLI capture: 5 egress queues, only queue 0 (Bronze-FC) has
    # non-zero counters.
    # NOTE(review): the original capture is column-aligned; exact internal
    # spacing could not be confirmed from this view.
    golden_output = {
        "execute.return_value": """
        Physical interface: ge-0/0/2, Enabled, Physical link is Up
        Interface index: 143, SNMP ifIndex: 601
        Description: to_ixia_2/4
        Forwarding classes: 16 supported, 5 in use
        Egress queues: 8 supported, 5 in use
        Queue: 0, Forwarding classes: Bronze-FC
        Queued:
        Packets : 1470816406 0 pps
        Bytes : 564883280956 0 bps
        Transmitted:
        Packets : 1470816406 0 pps
        Bytes : 564883280956 0 bps
        Tail-dropped packets : 0 0 pps
        RED-dropped packets : 0 0 pps
        Low : 0 0 pps
        Medium-low : 0 0 pps
        Medium-high : 0 0 pps
        High : 0 0 pps
        RED-dropped bytes : 0 0 bps
        Low : 0 0 bps
        Medium-low : 0 0 bps
        Medium-high : 0 0 bps
        High : 0 0 bps
        Queue: 1, Forwarding classes: Platinum-FC
        Queued:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Transmitted:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Tail-dropped packets : 0 0 pps
        RED-dropped packets : 0 0 pps
        Low : 0 0 pps
        Medium-low : 0 0 pps
        Medium-high : 0 0 pps
        High : 0 0 pps
        RED-dropped bytes : 0 0 bps
        Low : 0 0 bps
        Medium-low : 0 0 bps
        Medium-high : 0 0 bps
        High : 0 0 bps
        Queue: 2, Forwarding classes: Gold-FC
        Queued:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Transmitted:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Tail-dropped packets : 0 0 pps
        RED-dropped packets : 0 0 pps
        Low : 0 0 pps
        Medium-low : 0 0 pps
        Medium-high : 0 0 pps
        High : 0 0 pps
        RED-dropped bytes : 0 0 bps
        Low : 0 0 bps
        Medium-low : 0 0 bps
        Medium-high : 0 0 bps
        High : 0 0 bps
        Queue: 3, Forwarding classes: Network-Control-FC
        Queued:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Transmitted:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Tail-dropped packets : 0 0 pps
        RED-dropped packets : 0 0 pps
        Low : 0 0 pps
        Medium-low : 0 0 pps
        Medium-high : 0 0 pps
        High : 0 0 pps
        RED-dropped bytes : 0 0 bps
        Low : 0 0 bps
        Medium-low : 0 0 bps
        Medium-high : 0 0 bps
        High : 0 0 bps
        Queue: 4, Forwarding classes: Silver-FC
        Queued:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Transmitted:
        Packets : 0 0 pps
        Bytes : 0 0 bps
        Tail-dropped packets : 0 0 pps
        RED-dropped packets : 0 0 pps
        Low : 0 0 pps
        Medium-low : 0 0 pps
        Medium-high : 0 0 pps
        High : 0 0 pps
        RED-dropped bytes : 0 0 bps
        Low : 0 0 bps
        Medium-low : 0 0 bps
        Medium-high : 0 0 bps
        High : 0 0 bps
        """
    }

    # Expected parser result for golden_output above. All counters are kept
    # as strings, matching the parser's schema.
    golden_parsed_output = {
        "interface-information": {
            "physical-interface": {
                "description": "to_ixia_2/4",
                "local-index": "143",
                "name": "ge-0/0/2",
                "oper-status": "Up",
                "queue-counters": {
                    "interface-cos-summary": {
                        "intf-cos-forwarding-classes-in-use": "5",
                        "intf-cos-forwarding-classes-supported": "16",
                        "intf-cos-num-queues-in-use": "5",
                        "intf-cos-num-queues-supported": "8",
                        # Implicit string concatenation: "Egress queues".
                        "intf-cos-queue-type": "Egress " "queues",
                    },
                    # One entry per egress queue, in CLI order; only queue 0
                    # (Bronze-FC) carries non-zero counters.
                    "queue": [
                        {
                            "forwarding-class-name": "Bronze-FC",
                            "queue-counters-queued-bytes": "564883280956",
                            "queue-counters-queued-bytes-rate": "0",
                            "queue-counters-queued-packets": "1470816406",
                            "queue-counters-queued-packets-rate": "0",
                            "queue-counters-red-bytes": "0",
                            "queue-counters-red-bytes-high": "0",
                            "queue-counters-red-bytes-low": "0",
                            "queue-counters-red-bytes-medium-high": "0",
                            "queue-counters-red-bytes-medium-low": "0",
                            "queue-counters-red-bytes-rate": "0",
                            "queue-counters-red-bytes-rate-high": "0",
                            "queue-counters-red-bytes-rate-low": "0",
                            "queue-counters-red-bytes-rate-medium-high": "0",
                            "queue-counters-red-bytes-rate-medium-low": "0",
                            "queue-counters-red-packets": "0",
                            "queue-counters-red-packets-high": "0",
                            "queue-counters-red-packets-low": "0",
                            "queue-counters-red-packets-medium-high": "0",
                            "queue-counters-red-packets-medium-low": "0",
                            "queue-counters-red-packets-rate": "0",
                            "queue-counters-red-packets-rate-high": "0",
                            "queue-counters-red-packets-rate-low": "0",
                            "queue-counters-red-packets-rate-medium-high": "0",
                            "queue-counters-red-packets-rate-medium-low": "0",
                            "queue-counters-tail-drop-packets": "0",
                            "queue-counters-tail-drop-packets-rate": "0",
                            "queue-counters-trans-bytes": "564883280956",
                            "queue-counters-trans-bytes-rate": "0",
                            "queue-counters-trans-packets": "1470816406",
                            "queue-counters-trans-packets-rate": "0",
                            "queue-number": "0",
                        },
                        {
                            "forwarding-class-name": "Platinum-FC",
                            "queue-counters-queued-bytes": "0",
                            "queue-counters-queued-bytes-rate": "0",
                            "queue-counters-queued-packets": "0",
                            "queue-counters-queued-packets-rate": "0",
                            "queue-counters-red-bytes": "0",
                            "queue-counters-red-bytes-high": "0",
                            "queue-counters-red-bytes-low": "0",
                            "queue-counters-red-bytes-medium-high": "0",
                            "queue-counters-red-bytes-medium-low": "0",
                            "queue-counters-red-bytes-rate": "0",
                            "queue-counters-red-bytes-rate-high": "0",
                            "queue-counters-red-bytes-rate-low": "0",
                            "queue-counters-red-bytes-rate-medium-high": "0",
                            "queue-counters-red-bytes-rate-medium-low": "0",
                            "queue-counters-red-packets": "0",
                            "queue-counters-red-packets-high": "0",
                            "queue-counters-red-packets-low": "0",
                            "queue-counters-red-packets-medium-high": "0",
                            "queue-counters-red-packets-medium-low": "0",
                            "queue-counters-red-packets-rate": "0",
                            "queue-counters-red-packets-rate-high": "0",
                            "queue-counters-red-packets-rate-low": "0",
                            "queue-counters-red-packets-rate-medium-high": "0",
                            "queue-counters-red-packets-rate-medium-low": "0",
                            "queue-counters-tail-drop-packets": "0",
                            "queue-counters-tail-drop-packets-rate": "0",
                            "queue-counters-trans-bytes": "0",
                            "queue-counters-trans-bytes-rate": "0",
                            "queue-counters-trans-packets": "0",
                            "queue-counters-trans-packets-rate": "0",
                            "queue-number": "1",
                        },
                        {
                            "forwarding-class-name": "Gold-FC",
                            "queue-counters-queued-bytes": "0",
                            "queue-counters-queued-bytes-rate": "0",
                            "queue-counters-queued-packets": "0",
                            "queue-counters-queued-packets-rate": "0",
                            "queue-counters-red-bytes": "0",
                            "queue-counters-red-bytes-high": "0",
                            "queue-counters-red-bytes-low": "0",
                            "queue-counters-red-bytes-medium-high": "0",
                            "queue-counters-red-bytes-medium-low": "0",
                            "queue-counters-red-bytes-rate": "0",
                            "queue-counters-red-bytes-rate-high": "0",
                            "queue-counters-red-bytes-rate-low": "0",
                            "queue-counters-red-bytes-rate-medium-high": "0",
                            "queue-counters-red-bytes-rate-medium-low": "0",
                            "queue-counters-red-packets": "0",
                            "queue-counters-red-packets-high": "0",
                            "queue-counters-red-packets-low": "0",
                            "queue-counters-red-packets-medium-high": "0",
                            "queue-counters-red-packets-medium-low": "0",
                            "queue-counters-red-packets-rate": "0",
                            "queue-counters-red-packets-rate-high": "0",
                            "queue-counters-red-packets-rate-low": "0",
                            "queue-counters-red-packets-rate-medium-high": "0",
                            "queue-counters-red-packets-rate-medium-low": "0",
                            "queue-counters-tail-drop-packets": "0",
                            "queue-counters-tail-drop-packets-rate": "0",
                            "queue-counters-trans-bytes": "0",
                            "queue-counters-trans-bytes-rate": "0",
                            "queue-counters-trans-packets": "0",
                            "queue-counters-trans-packets-rate": "0",
                            "queue-number": "2",
                        },
                        {
                            "forwarding-class-name": "Network-Control-FC",
                            "queue-counters-queued-bytes": "0",
                            "queue-counters-queued-bytes-rate": "0",
                            "queue-counters-queued-packets": "0",
                            "queue-counters-queued-packets-rate": "0",
                            "queue-counters-red-bytes": "0",
                            "queue-counters-red-bytes-high": "0",
                            "queue-counters-red-bytes-low": "0",
                            "queue-counters-red-bytes-medium-high": "0",
                            "queue-counters-red-bytes-medium-low": "0",
                            "queue-counters-red-bytes-rate": "0",
                            "queue-counters-red-bytes-rate-high": "0",
                            "queue-counters-red-bytes-rate-low": "0",
                            "queue-counters-red-bytes-rate-medium-high": "0",
                            "queue-counters-red-bytes-rate-medium-low": "0",
                            "queue-counters-red-packets": "0",
                            "queue-counters-red-packets-high": "0",
                            "queue-counters-red-packets-low": "0",
                            "queue-counters-red-packets-medium-high": "0",
                            "queue-counters-red-packets-medium-low": "0",
                            "queue-counters-red-packets-rate": "0",
                            "queue-counters-red-packets-rate-high": "0",
                            "queue-counters-red-packets-rate-low": "0",
                            "queue-counters-red-packets-rate-medium-high": "0",
                            "queue-counters-red-packets-rate-medium-low": "0",
                            "queue-counters-tail-drop-packets": "0",
                            "queue-counters-tail-drop-packets-rate": "0",
                            "queue-counters-trans-bytes": "0",
                            "queue-counters-trans-bytes-rate": "0",
                            "queue-counters-trans-packets": "0",
                            "queue-counters-trans-packets-rate": "0",
                            "queue-number": "3",
                        },
                        {
                            "forwarding-class-name": "Silver-FC",
                            "queue-counters-queued-bytes": "0",
                            "queue-counters-queued-bytes-rate": "0",
                            "queue-counters-queued-packets": "0",
                            "queue-counters-queued-packets-rate": "0",
                            "queue-counters-red-bytes": "0",
                            "queue-counters-red-bytes-high": "0",
                            "queue-counters-red-bytes-low": "0",
                            "queue-counters-red-bytes-medium-high": "0",
                            "queue-counters-red-bytes-medium-low": "0",
                            "queue-counters-red-bytes-rate": "0",
                            "queue-counters-red-bytes-rate-high": "0",
                            "queue-counters-red-bytes-rate-low": "0",
                            "queue-counters-red-bytes-rate-medium-high": "0",
                            "queue-counters-red-bytes-rate-medium-low": "0",
                            "queue-counters-red-packets": "0",
                            "queue-counters-red-packets-high": "0",
                            "queue-counters-red-packets-low": "0",
                            "queue-counters-red-packets-medium-high": "0",
                            "queue-counters-red-packets-medium-low": "0",
                            "queue-counters-red-packets-rate": "0",
                            "queue-counters-red-packets-rate-high": "0",
                            "queue-counters-red-packets-rate-low": "0",
                            "queue-counters-red-packets-rate-medium-high": "0",
                            "queue-counters-red-packets-rate-medium-low": "0",
                            "queue-counters-tail-drop-packets": "0",
                            "queue-counters-tail-drop-packets-rate": "0",
                            "queue-counters-trans-bytes": "0",
                            "queue-counters-trans-bytes-rate": "0",
                            "queue-counters-trans-packets": "0",
                            "queue-counters-trans-packets-rate": "0",
                            "queue-number": "4",
                        },
                    ],
                },
                "snmp-index": "601",
            }
        }
    }

    def test_empty(self):
        """An empty CLI response must raise SchemaEmptyParserError."""
        self.device1 = Mock(**self.empty_output)
        interface_obj = ShowInterfacesQueue(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            interface_obj.parse()

    def test_golden(self):
        """The golden CLI capture parses into golden_parsed_output."""
        self.device = Mock(**self.golden_output)
        interface_obj = ShowInterfacesQueue(device=self.device)
        parsed_output = interface_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)
#############################################################################
# Unit test for show interfaces policers interface
#############################################################################
class test_show_interfaces_policers_interface(unittest.TestCase):
    """Unit tests for the ShowInterfacesPolicersInterface parser
    (JunOS ``show interfaces policers <interface>``).

    test_empty   -- an empty CLI response must raise SchemaEmptyParserError.
    test_golden  -- a captured golden CLI output must parse into the
                    expected structured dictionary.
    """

    device = Device(name="aDevice")
    # Mocked device response for the empty-output case.
    empty_output = {"execute.return_value": ""}
    # Show full diffs on assertEqual failures for these large dicts.
    maxDiff = None
    # Expected structured result for the golden CLI output below.
    golden_parsed_output = {
        "interface-policer-information": {
            "physical-interface": [
                {
                    "admin-status": "up",
                    "logical-interface": [
                        {
                            "admin-status": "up",
                            "name": "ge-0/0/2.0",
                            "oper-status": "up",
                            "policer-information": [
                                {
                                    "policer-family": "inet",
                                    "policer-input": "GE_1M-ge-0/0/2.0-log_int-i",
                                    "policer-output": "GE_1M-ge-0/0/2.0-log_int-o"
                                },
                                {
                                    "policer-family": "inet6",
                                    "policer-input": "GE_1M-ge-0/0/2.0-log_int-i",
                                    "policer-output": "GE_1M-ge-0/0/2.0-log_int-o"
                                },
                                {
                                    "policer-family": "multiservice",
                                    "policer-input": "__default_arp_policer__"
                                }
                            ]
                        }
                    ],
                    "name": "ge-0/0/2",
                    "oper-status": "up"
                }
            ]
        }
    }
    # Raw CLI capture fed to the parser via the mocked device.
    # NOTE(review): the string's internal whitespace/column alignment is
    # significant to the parser regexes -- do not reformat it.
    golden_output = {
        "execute.return_value": """
        Interface       Admin Link Proto Input Policer         Output Policer
        ge-0/0/2        up    up
        ge-0/0/2.0      up    up
        inet          GE_1M-ge-0/0/2.0-log_int-i GE_1M-ge-0/0/2.0-log_int-o
        inet6         GE_1M-ge-0/0/2.0-log_int-i GE_1M-ge-0/0/2.0-log_int-o
        multiservice  __default_arp_policer__
        """
    }

    def test_empty(self):
        """An empty CLI response must raise SchemaEmptyParserError."""
        self.device1 = Mock(**self.empty_output)
        interface_obj = ShowInterfacesPolicersInterface(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            interface_obj.parse(interface='ge-0/0/2')

    def test_golden(self):
        """The golden CLI output must parse into golden_parsed_output."""
        self.device = Mock(**self.golden_output)
        interface_obj = ShowInterfacesPolicersInterface(device=self.device)
        parsed_output = interface_obj.parse(interface='ge-0/0/2')
        self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowInterfacesStatistics(unittest.TestCase):
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_output = {'execute.return_value': """
show interfaces statistics
Physical interface: ge-0/0/0, Enabled, Physical link is Up
Interface index: 133, SNMP ifIndex: 506
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: 5e:00:40:ff:00:00, Hardware address: 5e:00:40:ff:00:00
Last flapped : 2020-06-22 22:33:51 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 712 bps (1 pps)
Output rate : 0 bps (0 pps)
Input errors: 1568, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Logical interface ge-0/0/0.0 (Index 70) (SNMP ifIndex 507)
Flags: SNMP-Traps 0x4000 Encapsulation: ENET2
Input packets : 4685
Output packets: 144
Security: Zone: trust
Allowed host-inbound traffic : dhcp http https ssh telnet
Protocol inet, MTU: 1500
Flags: Sendbcast-pkt-to-re, Is-Primary
Addresses, Flags: Is-Preferred Is-Primary
Destination: 172.16.1/24, Local: 172.16.1.55, Broadcast: 172.16.1.255
Physical interface: gr-0/0/0, Enabled, Physical link is Up
Interface index: 143, SNMP ifIndex: 519
Type: GRE, Link-level type: GRE, MTU: Unlimited, Speed: 800mbps
Link flags : Scheduler Keepalives DTE
Device flags : Present Running
Interface flags: Point-To-Point
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: ip-0/0/0, Enabled, Physical link is Up
Interface index: 144, SNMP ifIndex: 520
Type: IPIP, Link-level type: IP-over-IP, MTU: Unlimited, Speed: 800mbps
Link flags : Scheduler Keepalives DTE
Device flags : Present Running
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: lsq-0/0/0, Enabled, Physical link is Up
Interface index: 145, SNMP ifIndex: 521
Link-level type: LinkService, MTU: 1504
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps Internal: 0x4000
Last flapped : 2020-06-22 22:33:52 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: lt-0/0/0, Enabled, Physical link is Up
Interface index: 147, SNMP ifIndex: 523
Type: Logical-tunnel, Link-level type: Logical-tunnel, MTU: Unlimited,
Speed: 800mbps
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link flags : None
Physical info : 13
Current address: 02:96:14:ff:11:43, Hardware address: 02:96:14:ff:11:43
Last flapped : Never
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Physical interface: mt-0/0/0, Enabled, Physical link is Up
Interface index: 146, SNMP ifIndex: 522
Type: Multicast-GRE, Link-level type: GRE, MTU: Unlimited, Speed: 800mbps
Link flags : Keepalives DTE
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: sp-0/0/0, Enabled, Physical link is Up
Interface index: 142, SNMP ifIndex: 517
Type: Adaptive-Services, Link-level type: Adaptive-Services, MTU: 9192,
Speed: 800mbps
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps Internal: 0x4000
Link type : Full-Duplex
Link flags : None
Last flapped : 2020-06-22 22:33:52 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Logical interface sp-0/0/0.0 (Index 75) (SNMP ifIndex 518)
Flags: Point-To-Point SNMP-Traps Encapsulation: Adaptive-Services
Input packets : 0
Output packets: 0
Security: Zone: Null
Protocol inet, MTU: 9192
Flags: Receive-options, Receive-TTL-Exceeded
Protocol inet6, MTU: 9192
Flags: Primary, Is-Primary, Receive-options, Receive-TTL-Exceeded
Logical interface sp-0/0/0.16383 (Index 76) (SNMP ifIndex 524)
Flags: Point-To-Point SNMP-Traps Encapsulation: Adaptive-Services
Input packets : 0
Output packets: 0
Security: Zone: Null
Protocol inet, MTU: 9192
Flags: Is-Primary, Receive-options, Receive-TTL-Exceeded
Addresses, Flags: Is-Preferred Is-Primary
Destination: 10.0.0.16, Local: 10.0.0.1
Addresses
Local: 10.0.0.6
Addresses, Flags: Is-Preferred
Destination: 172.16.66.16, Local: 172.16.64.1
Addresses
Local: 172.16.64.6
Physical interface: ge-0/0/1, Enabled, Physical link is Up
Interface index: 134, SNMP ifIndex: 508
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:7f:fd, Hardware address: fa:16:3e:ff:7f:fd
Last flapped : 2020-06-22 22:34:01 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 312 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 8, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Logical interface ge-0/0/1.0 (Index 71) (SNMP ifIndex 516)
Flags: SNMP-Traps 0x4000 Encapsulation: ENET2
Input packets : 555
Output packets: 546
Security: Zone: trust
Allowed host-inbound traffic : bfd bgp dvmrp igmp ldp msdp nhrp ospf ospf3
pgm pim rip ripng router-discovery rsvp sap vrrp ping
Protocol inet, MTU: 1500
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 40.0.0/24, Local: 10.70.0.4, Broadcast: 10.70.0.255
Protocol inet6, MTU: 1500
Flags: None
Addresses, Flags: Is-Preferred Is-Primary
Destination: 2001:40::/64, Local: 2001:40::4
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::fa16:3eff:feff:7ffd
Physical interface: ge-0/0/2, Enabled, Physical link is Up
Interface index: 135, SNMP ifIndex: 509
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:0e:52, Hardware address: fa:16:3e:ff:0e:52
Last flapped : 2020-06-22 22:34:01 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 528 bps (0 pps)
Input errors: 2, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Logical interface ge-0/0/2.0 (Index 72) (SNMP ifIndex 525)
Flags: SNMP-Traps 0x4000 Encapsulation: ENET2
Input packets : 450
Output packets: 465
Security: Zone: trust
Allowed host-inbound traffic : bfd bgp dvmrp igmp ldp msdp nhrp ospf ospf3
pgm pim rip ripng router-discovery rsvp sap vrrp ping
Protocol inet, MTU: 1500
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Preferred Is-Primary
Destination: 50.0.0/24, Local: 10.205.0.4, Broadcast: 10.205.0.255
Protocol inet6, MTU: 1500
Flags: None
Addresses, Flags: Is-Preferred Is-Primary
Destination: 2001:50::/64, Local: 2001:50::4
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::fa16:3eff:feff:e52
Physical interface: ge-0/0/3, Enabled, Physical link is Up
Interface index: 136, SNMP ifIndex: 510
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:12:8e, Hardware address: fa:16:3e:ff:12:8e
Last flapped : 2020-06-22 22:34:01 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Logical interface ge-0/0/3.0 (Index 73) (SNMP ifIndex 526)
Flags: SNMP-Traps 0x4000 Encapsulation: ENET2
Input packets : 0
Output packets: 0
Security: Zone: trust
Allowed host-inbound traffic : bfd bgp dvmrp igmp ldp msdp nhrp ospf ospf3
pgm pim rip ripng router-discovery rsvp sap vrrp ping
Protocol inet, MTU: 1500
Flags: Sendbcast-pkt-to-re
Protocol inet6, MTU: 1500
Flags: None
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::fa16:3eff:feff:128e
Physical interface: ge-0/0/4, Enabled, Physical link is Up
Interface index: 137, SNMP ifIndex: 511
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:37:bd, Hardware address: fa:16:3e:ff:37:bd
Last flapped : 2020-06-22 22:34:02 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Logical interface ge-0/0/4.0 (Index 74) (SNMP ifIndex 527)
Flags: SNMP-Traps 0x4000 Encapsulation: ENET2
Input packets : 0
Output packets: 0
Security: Zone: trust
Allowed host-inbound traffic : bfd bgp dvmrp igmp ldp msdp nhrp ospf ospf3
pgm pim rip ripng router-discovery rsvp sap vrrp ping
Protocol inet, MTU: 1500
Flags: Sendbcast-pkt-to-re
Protocol inet6, MTU: 1500
Flags: None
Addresses, Flags: Is-Preferred
Destination: fe80::/64, Local: fe80::fa16:3eff:feff:37bd
Physical interface: ge-0/0/5, Enabled, Physical link is Up
Interface index: 138, SNMP ifIndex: 512
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:38:28, Hardware address: fa:16:3e:ff:38:28
Last flapped : 2020-06-22 22:34:02 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Physical interface: ge-0/0/6, Enabled, Physical link is Up
Interface index: 139, SNMP ifIndex: 513
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:54:ea, Hardware address: fa:16:3e:ff:54:ea
Last flapped : 2020-06-22 22:34:02 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Physical interface: ge-0/0/7, Enabled, Physical link is Up
Interface index: 140, SNMP ifIndex: 514
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:05:a1, Hardware address: fa:16:3e:ff:05:a1
Last flapped : 2020-06-22 22:34:02 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Physical interface: ge-0/0/8, Enabled, Physical link is Up
Interface index: 141, SNMP ifIndex: 515
Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
Remote fault: Online
Device flags : Present Running
Interface flags: SNMP-Traps Internal: 0x4000
Link flags : None
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:a2:b7, Hardware address: fa:16:3e:ff:a2:b7
Last flapped : 2020-06-22 22:34:02 EST (1w1d 00:22 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Active alarms : None
Active defects : None
Interface transmit statistics: Disabled
Physical interface: dsc, Enabled, Physical link is Up
Interface index: 5, SNMP ifIndex: 5
Type: Software-Pseudo, MTU: Unlimited
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link flags : None
Last flapped : Never
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Input errors: 0, Output errors: 0
Physical interface: gre, Enabled, Physical link is Up
Interface index: 10, SNMP ifIndex: 8
Type: GRE, Link-level type: GRE, MTU: Unlimited, Speed: Unlimited
Link flags : Keepalives DTE
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Physical interface: ipip, Enabled, Physical link is Up
Interface index: 11, SNMP ifIndex: 9
Type: IPIP, Link-level type: IP-over-IP, MTU: Unlimited, Speed: Unlimited
Link flags : Keepalives DTE
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Physical interface: lo0, Enabled, Physical link is Up
Interface index: 6, SNMP ifIndex: 6
Type: Loopback, MTU: Unlimited
Device flags : Present Running Loopback
Interface flags: SNMP-Traps
Link flags : None
Last flapped : Never
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 787
Output packets: 787
Input errors: 0, Output errors: 0
Logical interface lo0.0 (Index 66) (SNMP ifIndex 16)
Flags: SNMP-Traps Encapsulation: Unspecified
Input packets : 7
Output packets: 7
Security: Zone: trust
Allowed host-inbound traffic : bfd bgp dvmrp igmp ldp msdp nhrp ospf ospf3
pgm pim rip ripng router-discovery rsvp sap vrrp ping
Protocol inet, MTU: Unlimited
Flags: Sendbcast-pkt-to-re
Addresses, Flags: Is-Default Is-Primary
Local: 10.64.4.4
Protocol inet6, MTU: Unlimited
Flags: None
Addresses, Flags: Is-Default Is-Primary
Local: 2001::4
Local: fe80::5e00:400f:fc00:0
Logical interface lo0.16384 (Index 65) (SNMP ifIndex 21)
Flags: SNMP-Traps Encapsulation: Unspecified
Input packets : 0
Output packets: 0
Security: Zone: Null
Protocol inet, MTU: Unlimited
Flags: None
Addresses
Local: 127.0.0.1
Logical interface lo0.16385 (Index 67) (SNMP ifIndex 22)
Flags: SNMP-Traps Encapsulation: Unspecified
Input packets : 780
Output packets: 780
Security: Zone: Null
Protocol inet, MTU: Unlimited
Flags: None
Addresses, Flags: Is-Default Is-Primary
Local: 10.0.0.1
Addresses
Local: 10.0.0.16
Addresses
Local: 172.16.64.1
Addresses
Local: 172.16.64.4
Addresses
Local: 172.16.66.16
Logical interface lo0.32768 (Index 64) (SNMP ifIndex 248)
Flags: Encapsulation: Unspecified
Input packets : 0
Output packets: 0
Security: Zone: Null
Physical interface: lsi, Enabled, Physical link is Up
Interface index: 4, SNMP ifIndex: 4
Type: Software-Pseudo, Link-level type: LSI, MTU: 1496, Speed: Unlimited
Device flags : Present Running
Link flags : None
Last flapped : Never
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Input errors: 0, Output errors: 0
Physical interface: mtun, Enabled, Physical link is Up
Interface index: 64, SNMP ifIndex: 12
Type: Multicast-GRE, Link-level type: GRE, MTU: Unlimited, Speed: Unlimited
Link flags : Keepalives DTE
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Physical interface: pimd, Enabled, Physical link is Up
Interface index: 26, SNMP ifIndex: 11
Type: PIMD, Link-level type: PIM-Decapsulator, MTU: Unlimited,
Speed: Unlimited
Device flags : Present Running
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Physical interface: pime, Enabled, Physical link is Up
Interface index: 25, SNMP ifIndex: 10
Type: PIME, Link-level type: PIM-Encapsulator, MTU: Unlimited,
Speed: Unlimited
Device flags : Present Running
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Physical interface: pp0, Enabled, Physical link is Up
Interface index: 128, SNMP ifIndex: 501
Type: PPPoE, Link-level type: PPPoE, MTU: 1532
Device flags : Present Running
Interface flags: Point-To-Point SNMP-Traps
Link type : Full-Duplex
Link flags : None
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0
Physical interface: ppd0, Enabled, Physical link is Up
Interface index: 130, SNMP ifIndex: 503
Type: PIMD, Link-level type: PIM-Decapsulator, MTU: Unlimited, Speed: 800mbps
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: ppe0, Enabled, Physical link is Up
Interface index: 131, SNMP ifIndex: 504
Type: PIME, Link-level type: PIM-Encapsulator, MTU: Unlimited, Speed: 800mbps
Device flags : Present Running
Interface flags: SNMP-Traps
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: st0, Enabled, Physical link is Up
Interface index: 129, SNMP ifIndex: 502
Type: Secure-Tunnel, Link-level type: Secure-Tunnel, MTU: 9192
Device flags : Present Running
Interface flags: Point-To-Point
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Physical interface: tap, Enabled, Physical link is Up
Interface index: 12, SNMP ifIndex: 7
Type: Software-Pseudo, Link-level type: Interface-Specific, MTU: Unlimited,
Speed: Unlimited
Device flags : Present Running
Interface flags: SNMP-Traps
Link flags : None
Last flapped : Never
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input packets : 0
Output packets: 0
Input errors: 0, Output errors: 0
Physical interface: vlan, Enabled, Physical link is Down
Interface index: 132, SNMP ifIndex: 505
Type: VLAN, Link-level type: VLAN, MTU: 1518, Speed: 1000mbps
Device flags : Present Running Down
Interface flags: Hardware-Down
Link type : Full-Duplex
CoS queues : 8 supported, 8 maximum usable queues
Current address: fa:16:3e:ff:a2:b7, Hardware address: fa:16:3e:ff:a2:b7
Last flapped : 2020-06-22 22:29:08 EST (1w1d 00:26 ago)
Statistics last cleared: 2020-06-30 22:23:44 EST (00:32:21 ago)
Input rate : 0 bps (0 pps)
Output rate : 0 bps (0 pps)
Input errors: 0, Output errors: 0"""
}
golden_parsed_output = {
'interface-information': {
'physical-interface': [{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': '5e:00:40:ff:00:00',
'hardware-physical-address': '5e:00:40:ff:00:00',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '1568',
'interface-flapped': '2020-06-22 '
'22:33:51 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '133',
'logical-interface': [{
'address-family': [{
'address-family-flags': {
'ifff-is-primary': True,
'ifff-sendbcast-pkt-to-re': True
},
'address-family-name': 'inet',
'interface-address': [{
'ifa-broadcast': '172.16.1.255',
'ifa-destination': '172.16.1/24',
'ifa-flags': {
'ifaf-current-preferred': True,
'ifaf-current-primary': True
},
'ifa-local': '172.16.1.55'
}],
'mtu': '1500'
}],
'allowed-host-inbound-traffic': {
'inbound-dhcp': True,
'inbound-http': True,
'inbound-https': True,
'inbound-ssh': True,
'inbound-telnet': True
},
'encapsulation': 'ENET2',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'local-index': '70',
'logical-interface-zone-name': 'trust',
'name': 'ge-0/0/0.0',
'snmp-index': '507',
'traffic-statistics': {
'input-packets': '4685',
'output-packets': '144'
}
}],
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/0',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '506',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '712',
'input-pps': '1',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '143',
'name': 'gr-0/0/0',
'oper-status': 'Up',
'snmp-index': '519',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '144',
'name': 'ip-0/0/0',
'oper-status': 'Up',
'snmp-index': '520',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'interface-flapped': '2020-06-22 '
'22:33:52 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'local-index': '145',
'name': 'lsq-0/0/0',
'oper-status': 'Up',
'snmp-index': '521',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'current-physical-address': '02:96:14:ff:11:43',
'hardware-physical-address': '02:96:14:ff:11:43',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'interface-flapped': 'Never',
'local-index': '147',
'name': 'lt-0/0/0',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '523',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '146',
'name': 'mt-0/0/0',
'oper-status': 'Up',
'snmp-index': '522',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:33:52 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'local-index': '142',
'logical-interface': [{
'address-family': [{
'address-family-name': 'inet',
'mtu': '9192'
},
{
'address-family-name': 'inet6',
'mtu': '9192'
}
],
'local-index': '75',
'logical-interface-zone-name': 'Null',
'name': 'sp-0/0/0.0',
'snmp-index': '518',
'traffic-statistics': {
'input-packets': '0',
'output-packets': '0'
}
},
{
'address-family': [{
'address-family-name': 'inet',
'interface-address': [{
'ifa-destination': '10.0.0.16',
'ifa-flags': {
'ifaf-current-preferred': True,
'ifaf-current-primary': True
},
'ifa-local': '10.0.0.1'
},
{
'ifa-flags': {},
'ifa-local': '10.0.0.6'
},
{
'ifa-destination': '172.16.66.16',
'ifa-flags': {
'ifaf-current-preferred': True
},
'ifa-local': '172.16.64.1'
},
{
'ifa-flags': {},
'ifa-local': '172.16.64.6'
}
],
'mtu': '9192'
}],
'local-index': '76',
'logical-interface-zone-name': 'Null',
'name': 'sp-0/0/0.16383',
'snmp-index': '524',
'traffic-statistics': {
'input-packets': '0',
'output-packets': '0'
}
}
],
'name': 'sp-0/0/0',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '517',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:7f:fd',
'hardware-physical-address': 'fa:16:3e:ff:7f:fd',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '8',
'interface-flapped': '2020-06-22 '
'22:34:01 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '134',
'logical-interface': [{
'address-family': [{
'address-family-flags': {
'ifff-sendbcast-pkt-to-re': True
},
'address-family-name': 'inet',
'interface-address': [{
'ifa-broadcast': '10.70.0.255',
'ifa-destination': '40.0.0/24',
'ifa-flags': {
'ifaf-current-preferred': True,
'ifaf-current-primary': True
},
'ifa-local': '10.70.0.4'
}],
'mtu': '1500'
},
{
'address-family-name': 'inet6',
'interface-address': [{
'ifa-destination': '2001:40::/64',
'ifa-flags': {
'ifaf-current-preferred': True,
'ifaf-current-primary': True
},
'ifa-local': '2001:40::4'
},
{
'ifa-destination': 'fe80::/64',
'ifa-flags': {
'ifaf-current-preferred': True
},
'ifa-local': 'fe80::fa16:3eff:feff:7ffd'
}
],
'mtu': '1500'
}
],
'encapsulation': 'ENET2',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'local-index': '71',
'logical-interface-zone-name': 'trust',
'name': 'ge-0/0/1.0',
'snmp-index': '516',
'traffic-statistics': {
'input-packets': '555',
'output-packets': '546'
}
}],
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/1',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '508',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '312',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:0e:52',
'hardware-physical-address': 'fa:16:3e:ff:0e:52',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '2',
'interface-flapped': '2020-06-22 '
'22:34:01 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '135',
'logical-interface': [{
'address-family': [{
'address-family-flags': {
'ifff-sendbcast-pkt-to-re': True
},
'address-family-name': 'inet',
'interface-address': [{
'ifa-broadcast': '10.205.0.255',
'ifa-destination': '50.0.0/24',
'ifa-flags': {
'ifaf-current-preferred': True,
'ifaf-current-primary': True
},
'ifa-local': '10.205.0.4'
}],
'mtu': '1500'
},
{
'address-family-name': 'inet6',
'interface-address': [{
'ifa-destination': '2001:50::/64',
'ifa-flags': {
'ifaf-current-preferred': True,
'ifaf-current-primary': True
},
'ifa-local': '2001:50::4'
},
{
'ifa-destination': 'fe80::/64',
'ifa-flags': {
'ifaf-current-preferred': True
},
'ifa-local': 'fe80::fa16:3eff:feff:e52'
}
],
'mtu': '1500'
}
],
'encapsulation': 'ENET2',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'local-index': '72',
'logical-interface-zone-name': 'trust',
'name': 'ge-0/0/2.0',
'snmp-index': '525',
'traffic-statistics': {
'input-packets': '450',
'output-packets': '465'
}
}],
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/2',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '509',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '528',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:12:8e',
'hardware-physical-address': 'fa:16:3e:ff:12:8e',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:34:01 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '136',
'logical-interface': [{
'address-family': [{
'address-family-flags': {
'ifff-sendbcast-pkt-to-re': True
},
'address-family-name': 'inet',
'mtu': '1500'
},
{
'address-family-name': 'inet6',
'interface-address': [{
'ifa-destination': 'fe80::/64',
'ifa-flags': {
'ifaf-current-preferred': True
},
'ifa-local': 'fe80::fa16:3eff:feff:128e'
}],
'mtu': '1500'
}
],
'encapsulation': 'ENET2',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'local-index': '73',
'logical-interface-zone-name': 'trust',
'name': 'ge-0/0/3.0',
'snmp-index': '526',
'traffic-statistics': {
'input-packets': '0',
'output-packets': '0'
}
}],
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/3',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '510',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:37:bd',
'hardware-physical-address': 'fa:16:3e:ff:37:bd',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:34:02 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '137',
'logical-interface': [{
'address-family': [{
'address-family-flags': {
'ifff-sendbcast-pkt-to-re': True
},
'address-family-name': 'inet',
'mtu': '1500'
},
{
'address-family-name': 'inet6',
'interface-address': [{
'ifa-destination': 'fe80::/64',
'ifa-flags': {
'ifaf-current-preferred': True
},
'ifa-local': 'fe80::fa16:3eff:feff:37bd'
}],
'mtu': '1500'
}
],
'encapsulation': 'ENET2',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'local-index': '74',
'logical-interface-zone-name': 'trust',
'name': 'ge-0/0/4.0',
'snmp-index': '527',
'traffic-statistics': {
'input-packets': '787',
'output-packets': '787'
}
}],
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/4',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '511',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:38:28',
'hardware-physical-address': 'fa:16:3e:ff:38:28',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:34:02 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '138',
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/5',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '512',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:54:ea',
'hardware-physical-address': 'fa:16:3e:ff:54:ea',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:34:02 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '139',
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/6',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '513',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:05:a1',
'hardware-physical-address': 'fa:16:3e:ff:05:a1',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:34:02 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '140',
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/7',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '514',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'active-alarms': {
'interface-alarms': {
'alarm-not-present': True
}
},
'active-defects': {
'interface-alarms': {
'alarm-not-present': True
}
},
'admin-status': 'Enabled',
'bpdu-error': 'None',
'current-physical-address': 'fa:16:3e:ff:a2:b7',
'hardware-physical-address': 'fa:16:3e:ff:a2:b7',
'if-auto-negotiation': 'Enabled',
'if-config-flags': {
'iff-snmp-traps': True,
'internal-flags': '0x4000'
},
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-flow-control': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'if-remote-fault': 'Online',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:34:02 '
'EST '
'(1w1d '
'00:22 '
'ago)',
'interface-transmit-statistics': 'Disabled',
'l2pt-error': 'None',
'link-level-type': 'Ethernet',
'link-mode': 'Full-duplex',
'local-index': '141',
'loopback': 'Disabled',
'mtu': '1514',
'name': 'ge-0/0/8',
'oper-status': 'Up',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '515',
'source-filtering': 'Disabled',
'speed': '1000mbps',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'interface-flapped': 'Never',
'local-index': '5',
'name': 'dsc',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '5',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '10',
'name': 'gre',
'oper-status': 'Up',
'snmp-index': '8',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '11',
'name': 'ipip',
'oper-status': 'Up',
'snmp-index': '9',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'interface-flapped': 'Never',
'local-index': '6',
'logical-interface': [{
'address-family': [{
'address-family-flags': {
'ifff-sendbcast-pkt-to-re': True
},
'address-family-name': 'inet',
'interface-address': [{
'ifa-flags': {
'ifaf-current-default': True,
'ifaf-current-primary': True
},
'ifa-local': '10.64.4.4'
}],
'mtu': 'Unlimited'
},
{
'address-family-name': 'inet6',
'interface-address': [{
'ifa-flags': {
'ifaf-current-default': True,
'ifaf-current-primary': True
},
'ifa-local': 'fe80::5e00:400f:fc00:0'
}],
'mtu': 'Unlimited'
}
],
'local-index': '66',
'logical-interface-zone-name': 'trust',
'name': 'lo0.0',
'snmp-index': '16',
'traffic-statistics': {
'input-packets': '7',
'output-packets': '7'
}
},
{
'address-family': [{
'address-family-name': 'inet',
'interface-address': [{
'ifa-flags': {},
'ifa-local': '127.0.0.1'
}],
'mtu': 'Unlimited'
}],
'local-index': '65',
'logical-interface-zone-name': 'Null',
'name': 'lo0.16384',
'snmp-index': '21',
'traffic-statistics': {
'input-packets': '0',
'output-packets': '0'
}
},
{
'address-family': [{
'address-family-name': 'inet',
'interface-address': [{
'ifa-flags': {
'ifaf-current-default': True,
'ifaf-current-primary': True
},
'ifa-local': '10.0.0.1'
},
{
'ifa-flags': {},
'ifa-local': '10.0.0.16'
},
{
'ifa-flags': {},
'ifa-local': '172.16.64.1'
},
{
'ifa-flags': {},
'ifa-local': '172.16.64.4'
},
{
'ifa-flags': {},
'ifa-local': '172.16.66.16'
}
],
'mtu': 'Unlimited'
}],
'local-index': '67',
'logical-interface-zone-name': 'Null',
'name': 'lo0.16385',
'snmp-index': '22',
'traffic-statistics': {
'input-packets': '780',
'output-packets': '780'
}
},
{
'local-index': '64',
'logical-interface-zone-name': 'Null',
'name': 'lo0.32768',
'snmp-index': '248',
'traffic-statistics': {
'input-packets': '0',
'output-packets': '0'
}
}
],
'name': 'lo0',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '6',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'interface-flapped': 'Never',
'local-index': '4',
'name': 'lsi',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '4',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '64',
'name': 'mtun',
'oper-status': 'Up',
'snmp-index': '12',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '26',
'name': 'pimd',
'oper-status': 'Up',
'snmp-index': '11',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '25',
'name': 'pime',
'oper-status': 'Up',
'snmp-index': '10',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'local-index': '128',
'name': 'pp0',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '501',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '130',
'name': 'ppd0',
'oper-status': 'Up',
'snmp-index': '503',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '131',
'name': 'ppe0',
'oper-status': 'Up',
'snmp-index': '504',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'local-index': '129',
'name': 'st0',
'oper-status': 'Up',
'snmp-index': '502',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
},
{
'admin-status': 'Enabled',
'if-device-flags': {
'ifdf-present': True,
'ifdf-running': True
},
'if-media-flags': {
'ifmf-none': True
},
'input-error-count': '0',
'interface-flapped': 'Never',
'local-index': '12',
'name': 'tap',
'oper-status': 'Up',
'output-error-count': '0',
'snmp-index': '7',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)'
},
{
'admin-status': 'Enabled',
'current-physical-address': 'fa:16:3e:ff:a2:b7',
'hardware-physical-address': 'fa:16:3e:ff:a2:b7',
'input-error-count': '0',
'interface-flapped': '2020-06-22 '
'22:29:08 '
'EST '
'(1w1d '
'00:26 '
'ago)',
'local-index': '132',
'name': 'vlan',
'oper-status': 'Down',
'output-error-count': '0',
'physical-interface-cos-information': {
'physical-interface-cos-hw-max-queues': '8',
'physical-interface-cos-use-max-queues': '8'
},
'snmp-index': '505',
'statistics-cleared': '2020-06-30 '
'22:23:44 '
'EST '
'(00:32:21 '
'ago)',
'traffic-statistics': {
'input-bps': '0',
'input-pps': '0',
'output-bps': '0',
'output-pps': '0'
}
}
]
}
}
def test_empty(self):
self.device1 = Mock(**self.empty_output)
interface_obj = ShowInterfacesStatistics(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
interface_obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
interface_obj = ShowInterfacesStatistics(device=self.device)
parsed_output = interface_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowInterfacesExtensive(unittest.TestCase):
    """Unit tests for the ShowInterfacesExtensive parser.

    Exercises parsing of Junos ``show interfaces extensive <interface>``
    output: an empty-output error case and a golden-output comparison.
    """
    device = Device(name="aDevice")
    # Show full diffs on assertEqual failures (the expected dict is large).
    maxDiff = None
    empty_output = {"execute.return_value": ""}
    # Raw CLI output returned by the mocked device execute() call.
    golden_output = {"execute.return_value": """
        show interfaces extensive ge-0/0/0
        Physical interface: ge-0/0/0, Enabled, Physical link is Up
          Interface index: 148, SNMP ifIndex: 526, Generation: 2988
          Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None, Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None,
          Loopback: Disabled, Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
          Pad to minimum frame size: Disabled
          Device flags : Present Running
          Interface flags: SNMP-Traps Internal: 0x4000
          Link flags : None
          CoS queues : 8 supported, 8 maximum usable queues
          Schedulers : 0
          Hold-times : Up 0 ms, Down 0 ms
          Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
          Current address: 00:50:56:ff:55:26, Hardware address: 00:50:56:ff:55:26
          Last flapped : 2020-08-05 02:58:37 UTC (03:17:16 ago)
          Statistics last cleared: 2020-08-05 06:15:50 UTC (00:00:03 ago)
          Traffic statistics:
           Input bytes : 1900 3912 bps
           Output bytes : 648 1544 bps
           Input packets: 26 6 pps
           Output packets: 7 2 pps
           IPv6 transit statistics:
            Input bytes : 256
            Output bytes : 0
            Input packets: 4
            Output packets: 0
          Dropped traffic statistics due to STP State:
           Input bytes : 0
           Output bytes : 0
           Input packets: 0
           Output packets: 0
          Input errors:
            Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0, L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
          Output errors:
            Carrier transitions: 0, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0, MTU errors: 0, Resource errors: 0
          Egress queues: 8 supported, 4 in use
          Queue counters: Queued packets Transmitted packets Dropped packets
            0 0 0 0
            1 0 0 0
            2 0 0 0
            3 2 2 0
          Queue number: Mapped forwarding classes
            0 best-effort
            1 expedited-forwarding
            2 assured-forwarding
            3 network-control
          Active alarms : None
          Active defects : None
          PCS statistics Seconds
            Bit errors 0
            Errored blocks 0
          Ethernet FEC statistics Errors
            FEC Corrected Errors 0
            FEC Uncorrected Errors 0
            FEC Corrected Errors Rate 0
            FEC Uncorrected Errors Rate 0
          MAC statistics: Receive Transmit
            Total octets 2230 404
            Total packets 12 2
            Unicast packets 27 6
            Broadcast packets 0 0
            Multicast packets 0 0
            CRC/Align errors 0 0
            FIFO errors 0 0
            MAC control frames 0 0
            MAC pause frames 0 0
            Oversized frames 0
            Jabber frames 0
            Fragment frames 0
            VLAN tagged frames 0
            Code violations 0
            Total errors 0 0
          Filter statistics:
            Input packet count 27
            Input packet rejects 22
            Input DA rejects 0
            Input SA rejects 0
            Output packet count 6
            Output packet pad count 0
            Output packet error count 0
            CAM destination filters: 0, CAM source filters: 0
          Autonegotiation information:
            Negotiation status: Incomplete
          Packet Forwarding Engine configuration:
            Destination slot: 0 (0x00)
          CoS information:
            Direction : Output
            CoS transmit queue Bandwidth Buffer Priority Limit
            %% bps %% usec
            0 best-effort 95 950000000 95 0 low none
            3 network-control 5 50000000 5 0 low none
          Interface transmit statistics: Disabled
          Logical interface ge-0/0/0.0 (Index 332) (SNMP ifIndex 537) (Generation 30193)
            Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
            Traffic statistics:
             Input bytes : 1900
             Output bytes : 606
             Input packets: 26
             Output packets: 7
             IPv6 transit statistics:
              Input bytes : 256
              Output bytes : 0
              Input packets: 4
              Output packets: 0
            Local statistics:
             Input bytes : 0
             Output bytes : 606
             Input packets: 0
             Output packets: 7
            Transit statistics:
             Input bytes : 1900 3912 bps
             Output bytes : 0 0 bps
             Input packets: 26 6 pps
             Output packets: 0 0 pps
             IPv6 transit statistics:
              Input bytes : 256 504 bps
              Output bytes : 0 0 bps
              Input packets: 4 0 pps
              Output packets: 0 0 pps
            Protocol inet, MTU: 1500
            Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
            Generation: 82967, Route table: 0
              Flags: Sendbcast-pkt-to-re
              Addresses, Flags: Is-Preferred Is-Primary
                Destination: 20.0.0/24, Local: 10.145.0.1, Broadcast: 10.145.0.255, Generation: 172354
            Protocol inet6, MTU: 1500
            Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 0, Curr new hold cnt: 0, NH drop cnt: 0
            Generation: 82968, Route table: 0
              Flags: Is-Primary
              Addresses, Flags: Is-Preferred Is-Primary
                Destination: 2001:20::/64, Local: 2001:20::1
            Generation: 172356
              Addresses, Flags: Is-Preferred
                Destination: fe80::/64, Local: fe80::250:56ff:feff:5526
            Protocol multiservice, MTU: Unlimited, Generation: 172358
            Generation: 82969, Route table: 0
              Flags: Is-Primary
            Policer: Input: __default_arp_policer__
    """}
    # Structure the parser is expected to produce from golden_output.
    golden_parsed_output = {
        'interface-information': {
            'physical-interface': [{
                'name': 'ge-0/0/0',
                'admin-status': {
                    '@junos:format': 'Enabled'
                },
                'local-index': '148',
                'snmp-index': '526',
                'link-level-type': 'Ethernet',
                'mtu': '1514',
                'mru': '1522',
                'sonet-mode': 'LAN-PHY',
                'speed': '1000mbps',
                'bpdu-error': 'None',
                'ld-pdu-error': 'None',
                'eth-switch-error': 'None',
                'loopback': 'Disabled',
                'source-filtering': 'Disabled',
                'if-flow-control': 'Enabled',
                'if-auto-negotiation': 'Enabled',
                'if-remote-fault': 'Online',
                'pad-to-minimum-frame-size': 'Disabled',
                'if-device-flags': {
                    'ifdf-present': True,
                    'ifdf-running': True
                },
                'if-config-flags': {
                    'iff-snmp-traps': True,
                    'internal-flags': '0x4000'
                },
                'if-media-flags': {
                    'ifmf-none': True
                },
                'physical-interface-cos-information': {
                    'physical-interface-cos-hw-max-queues': '8',
                    'physical-interface-cos-use-max-queues': '8'
                },
                'current-physical-address': '00:50:56:ff:55:26',
                'hardware-physical-address': '00:50:56:ff:55:26',
                'interface-flapped': {
                    '#text': '2020-08-05 02:58:37 UTC (03:17:16 ago)'
                },
                'traffic-statistics': {
                    'input-bytes': '1900',
                    'input-bps': '3912',
                    'output-bytes': '648',
                    'output-bps': '1544',
                    'input-packets': '26',
                    'input-pps': '6',
                    'output-packets': '7',
                    'output-pps': '2',
                    'ipv6-transit-statistics': {
                        'input-bytes': '256',
                        'output-bytes': '0',
                        'input-packets': '4',
                        'output-packets': '0'
                    }
                },
                'stp-traffic-statistics': {
                    'stp-input-bytes-dropped': '0',
                    'stp-output-bytes-dropped': '0',
                    'stp-input-packets-dropped': '0',
                    'stp-output-packets-dropped': '0'
                },
                'input-error-list': {
                    'input-errors': '0',
                    'input-drops': '0',
                    'framing-errors': '0',
                    'input-runts': '0',
                    'input-discards': '0',
                    'input-l3-incompletes': '0',
                    'input-l2-channel-errors': '0',
                    'input-l2-mismatch-timeouts': '0',
                    'input-fifo-errors': '0',
                    'input-resource-errors': '0'
                },
                'output-error-list': {
                    'carrier-transitions': '0',
                    'output-errors': '0',
                    'output-drops': '0',
                    'output-collisions': '0',
                    'aged-packets': '0',
                    'output-fifo-errors': '0',
                    'hs-link-crc-errors': '0',
                    'mtu-errors': '0',
                    'output-resource-errors': '0'
                },
                'queue-counters': {
                    'interface-cos-short-summary': {
                        'intf-cos-num-queues-supported': '8',
                        'intf-cos-num-queues-in-use': '4'
                    },
                    'queue': [{
                        'queue-number': '0',
                        'queue-counters-queued-packets': '0',
                        'queue-counters-trans-packets': '0',
                        'queue-counters-total-drop-packets': '0'
                    }, {
                        'queue-number': '1',
                        'queue-counters-queued-packets': '0',
                        'queue-counters-trans-packets': '0',
                        'queue-counters-total-drop-packets': '0'
                    }, {
                        'queue-number': '2',
                        'queue-counters-queued-packets': '0',
                        'queue-counters-trans-packets': '0',
                        'queue-counters-total-drop-packets': '0'
                    }, {
                        'queue-number': '3',
                        'queue-counters-queued-packets': '2',
                        'queue-counters-trans-packets': '2',
                        'queue-counters-total-drop-packets': '0'
                    }]
                },
                'active-alarms': {
                    'interface-alarms': {
                        'alarm-not-present': True
                    }
                },
                'active-defects': {
                    'interface-alarms': {
                        'alarm-not-present': True
                    }
                },
                'ethernet-pcs-statistics': {
                    'bit-error-seconds': '0',
                    'errored-blocks-seconds': '0'
                },
                'ethernet-fec-statistics': {
                    'fec_ccw_count': '0',
                    'fec_nccw_count': '0',
                    'fec_ccw_error_rate': '0',
                    'fec_nccw_error_rate': '0'
                },
                'ethernet-mac-statistics': {
                    'input-bytes': '2230',
                    'output-bytes': '404',
                    'input-packets': '12',
                    'output-packets': '2',
                    'input-unicasts': '27',
                    'output-unicasts': '6',
                    'input-broadcasts': '0',
                    'output-broadcasts': '0',
                    'input-multicasts': '0',
                    'output-multicasts': '0',
                    'input-crc-errors': '0',
                    'output-crc-errors': '0',
                    'input-fifo-errors': '0',
                    'output-fifo-errors': '0',
                    'input-mac-control-frames': '0',
                    'output-mac-control-frames': '0',
                    'input-mac-pause-frames': '0',
                    'output-mac-pause-frames': '0',
                    'input-oversized-frames': '0',
                    'input-jabber-frames': '0',
                    'input-fragment-frames': '0',
                    'input-vlan-tagged-frames': '0',
                    'input-code-violations': '0'
                },
                'interface-transmit-statistics': 'Disabled',
                'logical-interface': [{
                    'name': 'ge-0/0/0.0',
                    'local-index': '332',
                    'snmp-index': '537',
                    'if-config-flags': {
                        'iff-up': True,
                        'iff-snmp-traps': True,
                        'internal-flags': '0x4004000'
                    },
                    'encapsulation': 'ENET2',
                    'traffic-statistics': {
                        'input-bytes': '1900',
                        'output-bytes': '606',
                        'input-packets': '26',
                        'output-packets': '7',
                        'ipv6-transit-statistics': {
                            'input-bytes': '0',
                            'output-bytes': '606',
                            'input-packets': '0',
                            'output-packets': '7'
                        }
                    },
                    'transit-traffic-statistics': {
                        'input-bytes': '1900',
                        'input-bps': '3912',
                        'output-bytes': '0',
                        'output-bps': '0',
                        'input-packets': '26',
                        'input-pps': '6',
                        'output-packets': '0',
                        'output-pps': '0',
                        'ipv6-transit-statistics': {
                            'input-bytes': '256',
                            'input-bps': '504',
                            'output-bytes': '0',
                            'output-bps': '0',
                            'input-packets': '4',
                            'input-pps': '0',
                            'output-packets': '0',
                            'output-pps': '0'
                        }
                    },
                    'address-family': [{
                        'address-family-name': 'inet',
                        'mtu': '1500',
                        'max-local-cache': '75000',
                        'new-hold-limit': '75000',
                        'intf-curr-cnt': '0',
                        'intf-unresolved-cnt': '0',
                        'intf-dropcnt': '0',
                        'address-family-flags': {
                            'ifff-sendbcast-pkt-to-re': True
                        },
                        'interface-address': {
                            'ifa-flags': {
                                'ifaf-is-preferred': True,
                                'ifaf-is-primary': True
                            }
                        }
                    }, {
                        'address-family-name': 'inet6',
                        'mtu': '1500',
                        'max-local-cache': '75000',
                        'new-hold-limit': '75000',
                        'intf-curr-cnt': '0',
                        'intf-unresolved-cnt': '0',
                        'intf-dropcnt': '0',
                        'address-family-flags': {
                            'ifff-is-primary': True
                        },
                        'interface-address': [{
                            'ifa-flags': {
                                'ifaf-is-preferred': True,
                                'ifaf-is-primary': True
                            },
                            'ifa-destination': '2001:20::/64',
                            'ifa-local': '2001:20::1'
                        }, {
                            'ifa-flags': {
                                'ifaf-is-preferred': True
                            },
                            'ifa-destination': 'fe80::/64',
                            'ifa-local': 'fe80::250:56ff:feff:5526'
                        }]
                    }, {
                        'address-family-name': 'multiservice',
                        'mtu': 'Unlimited',
                        'generation': '172358',
                        'address-family-flags': {
                            'ifff-is-primary': True
                        }
                    }]
                }]
            }]
        }
    }
    def test_empty(self):
        """Parser must raise SchemaEmptyParserError on empty CLI output."""
        self.device1 = Mock(**self.empty_output)
        interface_obj = ShowInterfacesExtensive(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            interface_obj.parse()
    def test_golden(self):
        """Parsed golden output must equal the expected structure."""
        self.device = Mock(**self.golden_output)
        interface_obj = ShowInterfacesExtensive(device=self.device)
        parsed_output = interface_obj.parse(interface='ge-0/0/0')
        self.assertEqual(parsed_output, self.golden_parsed_output)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
| 46.809815
| 191
| 0.363526
| 43,641
| 566,586
| 4.706423
| 0.020898
| 0.026958
| 0.019368
| 0.020668
| 0.949322
| 0.933201
| 0.915518
| 0.903161
| 0.888682
| 0.874986
| 0
| 0.076174
| 0.536506
| 566,586
| 12,104
| 192
| 46.809815
| 0.705951
| 0.000521
| 0
| 0.747682
| 0
| 0.035744
| 0.561779
| 0.047963
| 0
| 0
| 0.001373
| 0
| 0.001517
| 1
| 0.001517
| false
| 0
| 0.000422
| 0
| 0.006154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8a6720132c8f918b41150baaf081e113f26f2fd5
| 68,512
|
py
|
Python
|
kms/scripts/dba_script.py
|
phenixmzy/ranger-2.1.0-with-cdh6.3
|
bb9fb307d72d8fe3bd86ba52ea83ce7d7d02d2bc
|
[
"Apache-2.0"
] | null | null | null |
kms/scripts/dba_script.py
|
phenixmzy/ranger-2.1.0-with-cdh6.3
|
bb9fb307d72d8fe3bd86ba52ea83ce7d7d02d2bc
|
[
"Apache-2.0"
] | null | null | null |
kms/scripts/dba_script.py
|
phenixmzy/ranger-2.1.0-with-cdh6.3
|
bb9fb307d72d8fe3bd86ba52ea83ce7d7d02d2bc
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
import os
import re
import sys
import errno
import shlex
import logging
import platform
import subprocess
import fileinput
import getpass
from os.path import basename
from subprocess import Popen,PIPE
from datetime import date
# Python 2/3 compatibility shim: on Python 2 rebind input() to raw_input()
# so interactive prompts behave identically; Python 3 raises NameError here
# and keeps its native input().
try: input = raw_input
except NameError: pass
# Key/value pairs parsed from the installer properties file
# (filled in by populate_global_dict()).
globalDict = {}
# Normalized platform name, e.g. "LINUX", "DARWIN", "WINDOWS".
os_name = platform.system()
os_name = os_name.upper()
is_unix = os_name == "LINUX" or os_name == "DARWIN"
# When True, jisql_log() echoes each jisql command line (password masked).
jisql_debug=True
masked_pwd_string='********'
# Installation root; falls back to the current working directory when the
# RANGER_KMS_HOME environment variable is not set.
RANGER_KMS_HOME = os.getenv("RANGER_KMS_HOME")
if RANGER_KMS_HOME is None:
	RANGER_KMS_HOME = os.getcwd()
def check_output(query):
    """Run *query* as a subprocess and return its decoded stdout.

    On unix-like systems (``is_unix``) the command string is tokenized with
    shlex and executed without a shell; on Windows the string is handed to
    the shell as-is.

    :param query: command line to execute
    :returns: the child's stdout, decoded with the default encoding
    :raises EnvironmentError: on an unsupported platform (the original code
        left ``p`` unbound and died with a confusing NameError instead)
    """
    if is_unix:
        p = subprocess.Popen(shlex.split(query), stdout=subprocess.PIPE)
    elif os_name == "WINDOWS":
        p = subprocess.Popen(query, stdout=subprocess.PIPE, shell=True)
    else:
        raise EnvironmentError("Unsupported platform: " + os_name)
    # communicate() waits for the process and closes the pipe for us.
    output = p.communicate()[0]
    return output.decode()
def log(msg, type):
    """Emit *msg* through the stdlib logger at the severity named by *type*.

    Recognized values are 'info', 'debug', 'warning', 'exception' and
    'error'; any other value is silently ignored, matching the original
    if-chain behavior.
    """
    dispatch = {
        'info': logging.info,
        'debug': logging.debug,
        'warning': logging.warning,
        'exception': logging.exception,
        'error': logging.error,
    }
    emit = dispatch.get(type)
    if emit is not None:
        emit(" %s", msg)
def populate_global_dict():
    """Load key=value pairs from the installer properties file into globalDict.

    Reads ``install.properties`` on unix-like systems and
    ``bin/install_config.properties`` on Windows.  The original code opened
    the platform-specific file and then unconditionally re-opened
    ``install.properties``, leaking the first handle and ignoring the
    Windows config; both defects are fixed here, and the file is closed via
    a context manager.

    Blank lines and lines starting with ``#`` are skipped.  Values of keys
    whose name contains ``PASSWORD`` are deliberately blanked so passwords
    never land in ``globalDict``.
    """
    global globalDict
    if is_unix:
        config_path = os.path.join(RANGER_KMS_HOME, 'install.properties')
    elif os_name == "WINDOWS":
        config_path = os.path.join(RANGER_KMS_HOME, 'bin', 'install_config.properties')
    else:
        # Unknown platform: keep the original effective fallback of reading
        # install.properties from the installation root.
        config_path = os.path.join(RANGER_KMS_HOME, 'install.properties')
    with open(config_path) as read_config_file:
        for each_line in read_config_file.read().split('\n'):
            each_line = each_line.strip()
            if len(each_line) == 0:
                continue
            elif each_line[0] == "#":
                continue
            if '=' in each_line:
                key, value = each_line.split("=", 1)
                key = key.strip()
                if 'PASSWORD' in key:
                    # Never keep password values in the global dict.
                    value = ''
                value = value.strip()
                globalDict[key] = value
def logFile(msg):
    """Append *msg* to the dry-mode output file when dry-run mode is active.

    Does nothing unless globalDict["dryMode"] equals True.  The target file
    named by globalDict["dryModeOutputFile"] must be an existing, writable
    file; otherwise an error is logged and the process exits.
    """
    if globalDict["dryMode"] != True:
        return
    logFileName = globalDict["dryModeOutputFile"]
    if logFileName == "":
        log("[E] Invalid input! Provide file path to write DBA scripts:", "error")
        sys.exit()
    if not os.path.isfile(logFileName):
        log("[E] "+logFileName+" is Invalid input file name! Provide valid file path to write DBA scripts:", "error")
        sys.exit()
    if not os.access(logFileName, os.W_OK):
        log("[E] Unable to open file "+logFileName+" in write mode, Check file permissions.", "error")
        sys.exit()
    # The context manager closes the file; the original's explicit close()
    # inside the with-block was redundant.
    with open(logFileName, "a") as f:
        f.write(msg+"\n")
def password_validation(password, userType):
    """Exit the process unless *password* is acceptable for *userType*.

    Rejects passwords containing backslash, backtick, single or double
    quote.  An empty password is tolerated only for the "DBA root" user.
    """
    if not password:
        # Blank password: allowed only for the DBA root account.
        if userType == "DBA root":
            log("[I] "+userType+" user password validated","info")
        else:
            log("[E] Blank password is not allowed,please enter valid password.","error")
            sys.exit(1)
        return
    # Non-empty password: reject unsupported shell/SQL-sensitive characters.
    if re.search("[\\\`'\"]",password):
        log("[E] "+userType+" user password contains one of the unsupported special characters like \" ' \ `","error")
        sys.exit(1)
    log("[I] "+userType+" user password validated","info")
def jisql_log(query, db_root_password):
    """Log a jisql command line with the password argument masked out.

    Only active when the module-level jisql_debug flag is True.  The -p
    argument is quoted differently per platform, so the replacement
    pattern differs too.
    """
    if jisql_debug == True:
        if os_name == "WINDOWS":
            masked = query.replace(' -p "'+db_root_password+'"' , ' -p "'+masked_pwd_string+'"')
        else:
            masked = query.replace(" -p '"+db_root_password+"'" , " -p '"+masked_pwd_string+"'" )
        log("[JISQL] "+masked, "info")
class BaseDB(object):
    """Common interface for the per-database setup helpers.

    Each database flavour (MySQL, Oracle, Postgres, SQL Server) overrides
    these hooks; the base implementations only announce the step being run.
    """
    def create_rangerdb_user(self, root_user, db_user, db_password, db_root_password,dryMode):
        """Create the Ranger KMS database user (overridden by subclasses)."""
        banner = "[I] ---------- Creating user ----------"
        log(banner, "info")
    def check_connection(self, db_name, db_user, db_password):
        """Verify database connectivity (overridden by subclasses)."""
        banner = "[I] ---------- Verifying DB connection ----------"
        log(banner, "info")
    def create_db(self, root_user, db_root_password, db_name, db_user, db_password,dryMode):
        """Create/verify the Ranger KMS database (overridden by subclasses)."""
        banner = "[I] ---------- Verifying database ----------"
        log(banner, "info")
class MysqlConf(BaseDB):
    """MySQL implementation of BaseDB.

    Builds jisql command lines (with optional SSL options) and performs the
    Ranger KMS user/database/grant setup against a MySQL server.  In dry
    mode the SQL statements are written to a script file via logFile()
    instead of being executed.
    """
    # Constructor
    def __init__(self, host,SQL_CONNECTOR_JAR,JAVA_BIN,db_ssl_enabled,db_ssl_required,db_ssl_verifyServerCertificate,javax_net_ssl_keyStore,javax_net_ssl_keyStorePassword,javax_net_ssl_trustStore,javax_net_ssl_trustStorePassword,db_ssl_auth_type):
        self.host = host.lower()
        self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
        self.JAVA_BIN = JAVA_BIN
        # SSL flags arrive as strings; normalised to lower case for comparison.
        self.db_ssl_enabled=db_ssl_enabled.lower()
        self.db_ssl_required=db_ssl_required.lower()
        self.db_ssl_verifyServerCertificate=db_ssl_verifyServerCertificate.lower()
        self.db_ssl_auth_type=db_ssl_auth_type.lower()
        self.javax_net_ssl_keyStore=javax_net_ssl_keyStore
        self.javax_net_ssl_keyStorePassword=javax_net_ssl_keyStorePassword
        self.javax_net_ssl_trustStore=javax_net_ssl_trustStore
        self.javax_net_ssl_trustStorePassword=javax_net_ssl_trustStorePassword
    def get_jisql_cmd(self, user, password ,db_name):
        """Return the platform-specific jisql command line for *user*/*db_name*."""
        #TODO: User array for forming command
        path = RANGER_KMS_HOME
        db_ssl_param=''
        db_ssl_cert_param=''
        if self.db_ssl_enabled == 'true':
            db_ssl_param="?useSSL=%s&requireSSL=%s&verifyServerCertificate=%s" %(self.db_ssl_enabled,self.db_ssl_required,self.db_ssl_verifyServerCertificate)
            if self.db_ssl_verifyServerCertificate == 'true':
                if self.db_ssl_auth_type == '1-way':
                    # 1-way SSL: only a trust store is needed.
                    db_ssl_cert_param=" -Djavax.net.ssl.trustStore=%s -Djavax.net.ssl.trustStorePassword=%s " %(self.javax_net_ssl_trustStore,self.javax_net_ssl_trustStorePassword)
                else:
                    # 2-way SSL: key store plus trust store.
                    db_ssl_cert_param=" -Djavax.net.ssl.keyStore=%s -Djavax.net.ssl.keyStorePassword=%s -Djavax.net.ssl.trustStore=%s -Djavax.net.ssl.trustStorePassword=%s " %(self.javax_net_ssl_keyStore,self.javax_net_ssl_keyStorePassword,self.javax_net_ssl_trustStore,self.javax_net_ssl_trustStorePassword)
        else:
            db_ssl_param="?useSSL=false"
        if is_unix:
            jisql_cmd = "%s %s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -driver mysqlconj -cstring jdbc:mysql://%s/%s%s -u %s -p '%s' -noheader -trim -c \;" %(self.JAVA_BIN,db_ssl_cert_param,self.SQL_CONNECTOR_JAR,path,self.host,db_name,db_ssl_param,user,password)
        elif os_name == "WINDOWS":
            self.JAVA_BIN = self.JAVA_BIN.strip("'")
            jisql_cmd = "%s %s -cp %s;%s\jisql\\lib\\* org.apache.util.sql.Jisql -driver mysqlconj -cstring jdbc:mysql://%s/%s%s -u %s -p \"%s\" -noheader -trim" %(self.JAVA_BIN,db_ssl_cert_param,self.SQL_CONNECTOR_JAR, path, self.host, db_name,db_ssl_param, user, password)
        return jisql_cmd
    def verify_user(self, root_user, db_root_password, host, db_user, get_cmd,dryMode):
        """Return True when *db_user*@*host* is found in mysql.user."""
        if dryMode == False:
            log("[I] Verifying user " + db_user+ " for Host "+ host, "info")
        if is_unix:
            query = get_cmd + " -query \"select user from mysql.user where user='%s' and host='%s';\"" %(db_user,host)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select user from mysql.user where user='%s' and host='%s';\" -c ;" %(db_user,host)
        jisql_log(query, db_root_password)
        output = check_output(query)
        # NOTE(review): str.strip() takes a *character set*, so this is truthy
        # for any output not made up solely of those characters -- effectively
        # a "query returned something" check; confirm intent.
        if output.strip(db_user + " |"):
            return True
        else:
            return False
    def check_connection(self, db_name, db_user, db_password):
        """Return True when a jisql connection to *db_name* works; exit otherwise."""
        #log("[I] Checking connection..", "info")
        get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
        if is_unix:
            query = get_cmd + " -query \"SELECT version();\""
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"SELECT version();\" -c ;"
        jisql_log(query, db_password)
        output = check_output(query)
        # NOTE(review): same char-set strip() pattern as verify_user above.
        if output.strip('Production |'):
            #log("[I] Checking connection passed.", "info")
            return True
        else:
            log("[E] Can't establish db connection.. Exiting.." ,"error")
            sys.exit(1)
    def create_rangerdb_user(self, root_user, db_user, db_password, db_root_password,dryMode):
        """Create *db_user* on '%', 'localhost' and self.host, skipping hosts
        where the user already exists.  With an empty db_password the user is
        created without one; in dry mode the CREATE USER statements are only
        written to the script file."""
        if self.check_connection('mysql', root_user, db_root_password):
            hosts_arr =["%", "localhost"]
            hosts_arr.append(self.host)
            for host in hosts_arr:
                get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'mysql')
                if self.verify_user(root_user, db_root_password, host, db_user, get_cmd,dryMode):
                    if dryMode == False:
                        log("[I] MySQL user " + db_user + " already exists for host " + host, "info")
                else:
                    if db_password == "":
                        # passwordless user
                        if dryMode == False:
                            log("[I] MySQL user " + db_user + " does not exists for host " + host, "info")
                            if is_unix:
                                query = get_cmd + " -query \"create user '%s'@'%s';\"" %(db_user, host)
                                jisql_log(query, db_root_password)
                                ret = subprocess.call(shlex.split(query))
                            elif os_name == "WINDOWS":
                                query = get_cmd + " -query \"create user '%s'@'%s';\" -c ;" %(db_user, host)
                                jisql_log(query, db_root_password)
                                ret = subprocess.call(query)
                            if ret == 0:
                                if self.verify_user(root_user, db_root_password, host, db_user, get_cmd,dryMode):
                                    log("[I] MySQL user " + db_user +" created for host " + host ,"info")
                                else:
                                    log("[E] Creating MySQL user " + db_user +" failed..","error")
                                    sys.exit(1)
                        else:
                            logFile("create user '%s'@'%s';" %(db_user, host))
                    else:
                        if dryMode == False:
                            log("[I] MySQL user " + db_user + " does not exists for host " + host, "info")
                            if is_unix:
                                query = get_cmd + " -query \"create user '%s'@'%s' identified by '%s';\"" %(db_user, host, db_password)
                                # log the statement with the password masked
                                query_with_masked_pwd = get_cmd + " -query \"create user '%s'@'%s' identified by '%s';\"" %(db_user, host,masked_pwd_string )
                                jisql_log(query_with_masked_pwd, db_root_password)
                                ret = subprocess.call(shlex.split(query))
                            elif os_name == "WINDOWS":
                                query = get_cmd + " -query \"create user '%s'@'%s' identified by '%s';\" -c ;" %(db_user, host, db_password)
                                query_with_masked_pwd = get_cmd + " -query \"create user '%s'@'%s' identified by '%s';\" -c ;" %(db_user, host, masked_pwd_string)
                                jisql_log(query_with_masked_pwd, db_root_password)
                                ret = subprocess.call(query)
                            if ret == 0:
                                if self.verify_user(root_user, db_root_password, host, db_user, get_cmd,dryMode):
                                    log("[I] MySQL user " + db_user +" created for host " + host ,"info")
                                else:
                                    log("[E] Creating MySQL user " + db_user +" failed..","error")
                                    sys.exit(1)
                            else:
                                log("[E] Creating MySQL user " + db_user +" failed..","error")
                                sys.exit(1)
                        else:
                            logFile("create user '%s'@'%s' identified by '%s';" %(db_user, host,db_password))
    def verify_db(self, root_user, db_root_password, db_name,dryMode):
        """Return True when database *db_name* exists."""
        if dryMode == False:
            log("[I] Verifying database " + db_name , "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'mysql')
        if is_unix:
            query = get_cmd + " -query \"show databases like '%s';\"" %(db_name)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"show databases like '%s';\" -c ;" %(db_name)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_name + " |"):
            return True
        else:
            return False
    def create_db(self, root_user, db_root_password, db_name, db_user, db_password,dryMode):
        """Create *db_name* unless it already exists; exits on failure."""
        if self.verify_db(root_user, db_root_password, db_name,dryMode):
            if dryMode == False:
                log("[I] Database "+db_name + " already exists.","info")
        else:
            get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'mysql')
            if is_unix:
                query = get_cmd + " -query \"create database %s;\"" %(db_name)
            elif os_name == "WINDOWS":
                query = get_cmd + " -query \"create database %s;\" -c ;" %(db_name)
            if dryMode == False:
                log("[I] Database does not exist, Creating database " + db_name,"info")
                jisql_log(query, db_root_password)
                if is_unix:
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    ret = subprocess.call(query)
                if ret != 0:
                    log("[E] Database creation failed..","error")
                    sys.exit(1)
                else:
                    # re-check that the database is actually visible now
                    if self.verify_db(root_user, db_root_password, db_name,dryMode):
                        log("[I] Creating database " + db_name + " succeeded", "info")
                        return True
                    else:
                        log("[E] Database creation failed..","error")
                        sys.exit(1)
            else:
                logFile("create database %s;" %(db_name))
    def grant_xa_db_user(self, root_user, db_name, db_user, db_password, db_root_password, is_revoke,dryMode):
        """Grant all privileges on *db_name* to *db_user* for every host, then
        FLUSH PRIVILEGES; exits on any failure."""
        hosts_arr =["%", "localhost"]
        hosts_arr.append(self.host)
        for host in hosts_arr:
            if dryMode == False:
                log("[I] ---------- Granting privileges TO user '"+db_user+"'@'"+host+"' on db '"+db_name+"'----------" , "info")
                get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'mysql')
                if is_unix:
                    query = get_cmd + " -query \"grant all privileges on %s.* to '%s'@'%s' with grant option;\"" %(db_name,db_user, host)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"grant all privileges on %s.* to '%s'@'%s' with grant option;\" -c ;" %(db_name,db_user, host)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(query)
                if ret == 0:
                    log("[I] ---------- FLUSH PRIVILEGES ----------" , "info")
                    if is_unix:
                        query = get_cmd + " -query \"FLUSH PRIVILEGES;\""
                        jisql_log(query, db_root_password)
                        ret = subprocess.call(shlex.split(query))
                    elif os_name == "WINDOWS":
                        query = get_cmd + " -query \"FLUSH PRIVILEGES;\" -c ;"
                        jisql_log(query, db_root_password)
                        ret = subprocess.call(query)
                    if ret == 0:
                        log("[I] Privileges granted to '" + db_user + "' on '"+db_name+"'", "info")
                    else:
                        log("[E] Granting privileges to '" +db_user+"' failed on '"+db_name+"'", "error")
                        sys.exit(1)
                else:
                    log("[E] Granting privileges to '" +db_user+"' failed on '"+db_name+"'", "error")
                    sys.exit(1)
            else:
                logFile("grant all privileges on %s.* to '%s'@'%s' with grant option;" %(db_name,db_user, host))
    def writeDrymodeCmd(self, xa_db_root_user, xa_db_root_password, db_user, db_password, db_name):
        """Write the full MySQL setup script (user, db, grants) for dry mode."""
        logFile("# Login to MySQL Server from a MySQL dba user(i.e 'root') to execute below sql statements.")
        hosts_arr =["%", "localhost"]
        if not self.host == "localhost": hosts_arr.append(self.host)
        for host in hosts_arr:
            logFile("create user '%s'@'%s' identified by '%s';" %(db_user, host, db_password))
        logFile("create database %s;"%(db_name))
        for host in hosts_arr:
            logFile("grant all privileges on %s.* to '%s'@'%s' with grant option;"%(db_name, db_user, host))
        logFile("FLUSH PRIVILEGES;")
class OracleConf(BaseDB):
    """Oracle implementation of BaseDB.

    Creates the Ranger KMS user and a dedicated tablespace, assigns the
    tablespace as the user's default and grants the required privileges.
    In dry mode the statements are written to a script file via logFile().
    """
    # Constructor
    def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
        self.host = host
        self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
        self.JAVA_BIN = JAVA_BIN
    def get_jisql_cmd(self, user, password):
        """Return the platform-specific jisql command line for *user*."""
        #TODO: User array for forming command
        path = RANGER_KMS_HOME
        # Use /dev/urandom for the JVM's entropy source to avoid blocking.
        if not re.search('-Djava.security.egd=file:///dev/urandom', self.JAVA_BIN):
            self.JAVA_BIN = self.JAVA_BIN + " -Djava.security.egd=file:///dev/urandom "
        #if self.host.count(":") == 2:
        if self.host.count(":") == 2 or self.host.count(":") == 0:
            #jdbc:oracle:thin:@[HOST][:PORT]:SID or #jdbc:oracle:thin:@GL
            cstring="jdbc:oracle:thin:@%s" %(self.host)
        else:
            #jdbc:oracle:thin:@//[HOST][:PORT]/SERVICE
            cstring="jdbc:oracle:thin:@//%s" %(self.host)
        if is_unix:
            jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -driver oraclethin -cstring %s -u '%s' -p '%s' -noheader -trim" %(self.JAVA_BIN, self.SQL_CONNECTOR_JAR,path, cstring, user, password)
        elif os_name == "WINDOWS":
            jisql_cmd = "%s -cp %s;%s\jisql\\lib\\* org.apache.util.sql.Jisql -driver oraclethin -cstring %s -u \"%s\" -p \"%s\" -noheader -trim" %(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, cstring, user, password)
        return jisql_cmd
    def check_connection(self, db_name, db_user, db_password):
        """Return True when a jisql connection works; exit otherwise.
        *db_name* is unused for Oracle (the connect string fixes the target)."""
        log("[I] Checking connection", "info")
        get_cmd = self.get_jisql_cmd(db_user, db_password)
        if is_unix:
            query = get_cmd + " -c \; -query \"select * from v$version;\""
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select * from v$version;\" -c ;"
        jisql_log(query, db_password)
        output = check_output(query)
        # NOTE(review): strip() with a char set -- effectively a non-empty check.
        if output.strip('Production |'):
            log("[I] Connection success", "info")
            return True
        else:
            log("[E] Can't establish connection,Change configuration or Contact Administrator!!", "error")
            sys.exit(1)
    def verify_user(self, root_user, db_user, db_root_password,dryMode):
        """Return True when *db_user* exists in all_users."""
        if dryMode == False:
            log("[I] Verifying user " + db_user ,"info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password)
        if is_unix:
            query = get_cmd + " -c \; -query \"select username from all_users where upper(username)=upper('%s');\"" %(db_user)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select username from all_users where upper(username)=upper('%s');\" -c ;" %(db_user)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_user + " |"):
            return True
        else:
            return False
    def create_rangerdb_user(self, root_user, db_user, db_password, db_root_password,dryMode):
        """Create *db_user* (unless present) and grant it the base privileges."""
        # NOTE(review): the extra 'self' argument shifts the parameters of
        # check_connection (db_name receives the instance); harmless only
        # because Oracle's check_connection ignores db_name -- confirm.
        if self.check_connection(self, root_user, db_root_password):
            if self.verify_user(root_user, db_user, db_root_password,dryMode):
                if dryMode == False:
                    log("[I] Oracle user " + db_user + " already exists.", "info")
            else:
                if dryMode == False:
                    log("[I] User does not exists, Creating user : " + db_user, "info")
                    get_cmd = self.get_jisql_cmd(root_user, db_root_password)
                    if is_unix:
                        query = get_cmd + " -c \; -query 'create user %s identified by \"%s\";'" %(db_user, db_password)
                        # log the statement with the password masked
                        query_with_masked_pwd = get_cmd + " -c \; -query 'create user %s identified by \"%s\";'" %(db_user, masked_pwd_string)
                        jisql_log(query_with_masked_pwd, db_root_password)
                        ret = subprocess.call(shlex.split(query))
                    elif os_name == "WINDOWS":
                        query = get_cmd + " -query \"create user %s identified by \"%s\";\" -c ;" %(db_user, db_password)
                        query_with_masked_pwd = get_cmd + " -query \"create user %s identified by \"%s\";\" -c ;" %(db_user, masked_pwd_string)
                        jisql_log(query_with_masked_pwd, db_root_password)
                        ret = subprocess.call(query)
                    if ret == 0:
                        if self.verify_user(root_user, db_user, db_root_password,dryMode):
                            log("[I] User " + db_user + " created", "info")
                            log("[I] Granting permission to " + db_user, "info")
                            if is_unix:
                                query = get_cmd + " -c \; -query 'GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;'" % (db_user)
                                jisql_log(query, db_root_password)
                                ret = subprocess.call(shlex.split(query))
                            elif os_name == "WINDOWS":
                                query = get_cmd + " -query \"GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;\" -c ;" % (db_user)
                                jisql_log(query, db_root_password)
                                ret = subprocess.call(query)
                            if ret == 0:
                                log("[I] Granting permissions to Oracle user '" + db_user + "' for %s done" %(self.host), "info")
                            else:
                                log("[E] Granting permissions to Oracle user '" + db_user + "' failed..", "error")
                                sys.exit(1)
                        else:
                            log("[E] Creating Oracle user '" + db_user + "' failed..", "error")
                            sys.exit(1)
                    else:
                        log("[E] Creating Oracle user '" + db_user + "' failed..", "error")
                        sys.exit(1)
                else:
                    logFile("create user %s identified by \"%s\";" %(db_user, db_password))
    def verify_tablespace(self, root_user, db_root_password, db_name,dryMode):
        """Return True when tablespace *db_name* exists."""
        if dryMode == False:
            log("[I] Verifying tablespace " + db_name, "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password)
        if is_unix:
            query = get_cmd + " -c \; -query \"SELECT DISTINCT UPPER(TABLESPACE_NAME) FROM USER_TablespaceS where UPPER(Tablespace_Name)=UPPER(\'%s\');\"" %(db_name)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"SELECT DISTINCT UPPER(TABLESPACE_NAME) FROM USER_TablespaceS where UPPER(Tablespace_Name)=UPPER(\'%s\');\" -c ;" %(db_name)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_name+' |'):
            return True
        else:
            return False
    def create_db(self, root_user, db_root_password, db_name, db_user, db_password,dryMode):
        """Create tablespace *db_name* and assign it to *db_user*; when the
        tablespace already exists, verify it is the user's default."""
        if self.verify_tablespace(root_user, db_root_password, db_name,dryMode):
            if dryMode == False:
                log("[I] Tablespace " + db_name + " already exists.","info")
            if self.verify_user(root_user, db_user, db_root_password,dryMode):
                # connect as the application user to read its default tablespace
                get_cmd = self.get_jisql_cmd(db_user ,db_password)
                if is_unix:
                    query = get_cmd + " -c \; -query 'select default_tablespace from user_users;'"
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"select default_tablespace from user_users;\" -c ;"
                jisql_log(query, db_root_password)
                output = check_output(query).strip()
                # jisql appends ' |' to each value; compare against that form
                db_name = db_name.upper() +' |'
                if output == db_name:
                    log("[I] User name " + db_user + " and tablespace " + db_name + " already exists.","info")
                else:
                    log("[E] "+db_user + " user already assigned some other tablespace , give some other DB name.","error")
                    sys.exit(1)
                #status = self.assign_tablespace(root_user, db_root_password, db_user, db_password, db_name, False)
                #return status
        else:
            if dryMode == False:
                log("[I] Tablespace does not exist. Creating tablespace: " + db_name,"info")
                get_cmd = self.get_jisql_cmd(root_user, db_root_password)
                if is_unix:
                    query = get_cmd + " -c \; -query \"create tablespace %s datafile '%s.dat' size 10M autoextend on;\"" %(db_name, db_name)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"create tablespace %s datafile '%s.dat' size 10M autoextend on;\" -c ;" %(db_name, db_name)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(query)
                if ret == 0:
                    if self.verify_tablespace(root_user, db_root_password, db_name,dryMode):
                        log("[I] Creating tablespace "+db_name+" succeeded", "info")
                        status=True
                        status = self.assign_tablespace(root_user, db_root_password, db_user, db_password, db_name, status,dryMode)
                        return status
                    else:
                        log("[E] Creating tablespace "+db_name+" failed..", "error")
                        sys.exit(1)
                else:
                    log("[E] Creating tablespace "+db_name+" failed..", "error")
                    sys.exit(1)
            else:
                logFile("create tablespace %s datafile '%s.dat' size 10M autoextend on;" %(db_name, db_name))
    def assign_tablespace(self, root_user, db_root_password, db_user, db_password, db_name, status,dryMode):
        """Set *db_name* as *db_user*'s default tablespace and grant the base
        privileges; returns *status* on success, exits on failure."""
        if dryMode == False:
            log("[I] Assign default tablespace " +db_name + " to " + db_user, "info")
            # Assign default tablespace db_name
            get_cmd = self.get_jisql_cmd(root_user , db_root_password)
            if is_unix:
                query = get_cmd +" -c \; -query 'alter user %s DEFAULT Tablespace %s;'" %(db_user, db_name)
                jisql_log(query, db_root_password)
                ret = subprocess.call(shlex.split(query))
            elif os_name == "WINDOWS":
                query = get_cmd +" -query \"alter user %s DEFAULT Tablespace %s;\" -c ;" %(db_user, db_name)
                jisql_log(query, db_root_password)
                ret = subprocess.call(query)
            if ret == 0:
                log("[I] Granting permission to " + db_user, "info")
                if is_unix:
                    query = get_cmd + " -c \; -query 'GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;'" % (db_user)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;\" -c ;" % (db_user)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(query)
                if ret == 0:
                    log("[I] Granting Oracle user '" + db_user + "' done", "info")
                    return status
                else:
                    log("[E] Granting Oracle user '" + db_user + "' failed..", "error")
                    sys.exit(1)
            else:
                log("[E] Assigning default tablespace to user '" + db_user + "' failed..", "error")
                sys.exit(1)
        else:
            logFile("alter user %s DEFAULT Tablespace %s;" %(db_user, db_name))
            logFile("GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;" % (db_user))
    def grant_xa_db_user(self, root_user, db_name, db_user, db_password, db_root_password, invoke,dryMode):
        """(Re-)grant the base Oracle privileges to *db_user*; exits on failure."""
        if dryMode == False:
            get_cmd = self.get_jisql_cmd(root_user ,db_root_password)
            if is_unix:
                query = get_cmd + " -c \; -query 'GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;'" % (db_user)
                jisql_log(query, db_root_password)
                ret = subprocess.call(shlex.split(query))
            elif os_name == "WINDOWS":
                query = get_cmd + " -query \"GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;\" -c ;" % (db_user)
                jisql_log(query, db_root_password)
                ret = subprocess.call(query)
            if ret == 0:
                log("[I] Granted permission to " + db_user, "info")
                return True
            else:
                log("[E] Granting Oracle user '" + db_user + "' failed..", "error")
                sys.exit(1)
        else:
            logFile("GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;" % (db_user))
    def writeDrymodeCmd(self, xa_db_root_user, xa_db_root_password, db_user, db_password, db_name):
        """Write the full Oracle setup script (user, grants, tablespace) for dry mode."""
        logFile("# Login to ORACLE Server from a ORACLE dba user(i.e 'sys') to execute below sql statements.")
        logFile('create user %s identified by "%s";'%(db_user, db_password))
        logFile('GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE ANY SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;'%(db_user))
        logFile("create tablespace %s datafile '%s.dat' size 10M autoextend on;" %(db_name, db_name))
        logFile('alter user %s DEFAULT Tablespace %s;'%(db_user, db_name))
        logFile('GRANT CREATE SESSION,CREATE PROCEDURE,CREATE TABLE,CREATE VIEW,CREATE SEQUENCE,CREATE PUBLIC SYNONYM,CREATE ANY SYNONYM,CREATE TRIGGER,UNLIMITED Tablespace TO %s;'%(db_user))
class PostgresConf(BaseDB):
# Constructor
def __init__(self, host,SQL_CONNECTOR_JAR,JAVA_BIN,db_ssl_enabled,db_ssl_required,db_ssl_verifyServerCertificate,javax_net_ssl_keyStore,javax_net_ssl_keyStorePassword,javax_net_ssl_trustStore,javax_net_ssl_trustStorePassword,db_ssl_auth_type):
self.host = host.lower()
self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
self.JAVA_BIN = JAVA_BIN
self.db_ssl_enabled=db_ssl_enabled.lower()
self.db_ssl_required=db_ssl_required.lower()
self.db_ssl_verifyServerCertificate=db_ssl_verifyServerCertificate.lower()
self.db_ssl_auth_type=db_ssl_auth_type.lower()
self.javax_net_ssl_keyStore=javax_net_ssl_keyStore
self.javax_net_ssl_keyStorePassword=javax_net_ssl_keyStorePassword
self.javax_net_ssl_trustStore=javax_net_ssl_trustStore
self.javax_net_ssl_trustStorePassword=javax_net_ssl_trustStorePassword
def get_jisql_cmd(self, user, password, db_name):
#TODO: User array for forming command
path = RANGER_KMS_HOME
self.JAVA_BIN = self.JAVA_BIN.strip("'")
db_ssl_param=''
db_ssl_cert_param=''
if self.db_ssl_enabled == 'true':
db_ssl_param="?ssl=%s" %(self.db_ssl_enabled)
if self.db_ssl_verifyServerCertificate == 'true' or self.db_ssl_required == 'true':
if self.db_ssl_auth_type == '1-way':
db_ssl_cert_param=" -Djavax.net.ssl.trustStore=%s -Djavax.net.ssl.trustStorePassword=%s " %(self.javax_net_ssl_trustStore,self.javax_net_ssl_trustStorePassword)
else:
db_ssl_cert_param=" -Djavax.net.ssl.keyStore=%s -Djavax.net.ssl.keyStorePassword=%s -Djavax.net.ssl.trustStore=%s -Djavax.net.ssl.trustStorePassword=%s " %(self.javax_net_ssl_keyStore,self.javax_net_ssl_keyStorePassword,self.javax_net_ssl_trustStore,self.javax_net_ssl_trustStorePassword)
else:
db_ssl_param="?ssl=%s&sslfactory=org.postgresql.ssl.NonValidatingFactory" %(self.db_ssl_enabled)
if is_unix:
jisql_cmd = "%s %s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -driver postgresql -cstring jdbc:postgresql://%s/%s%s -u %s -p '%s' -noheader -trim -c \;" %(self.JAVA_BIN, db_ssl_cert_param,self.SQL_CONNECTOR_JAR,path, self.host, db_name, db_ssl_param,user, password)
elif os_name == "WINDOWS":
jisql_cmd = "%s %s -cp %s;%s\jisql\\lib\\* org.apache.util.sql.Jisql -driver postgresql -cstring jdbc:postgresql://%s/%s%s -u %s -p \"%s\" -noheader -trim" %(self.JAVA_BIN, db_ssl_cert_param,self.SQL_CONNECTOR_JAR, path, self.host, db_name, db_ssl_param,user, password)
return jisql_cmd
def verify_user(self, root_user, db_root_password, db_user,dryMode):
if dryMode == False:
log("[I] Verifying user " + db_user , "info")
get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'postgres')
if is_unix:
query = get_cmd + " -query \"SELECT rolname FROM pg_roles WHERE rolname='%s';\"" %(db_user)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT rolname FROM pg_roles WHERE rolname='%s';\" -c ;" %(db_user)
jisql_log(query, db_root_password)
output = check_output(query)
if output.strip(db_user + " |"):
return True
else:
return False
def check_connection(self, db_name, db_user, db_password):
#log("[I] Checking connection", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if is_unix:
query = get_cmd + " -query \"SELECT 1;\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT 1;\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
if output.strip('1 |'):
#log("[I] connection success", "info")
return True
else:
log("[E] Can't establish connection, Please check connection settings or contact Administrator", "error")
sys.exit(1)
def create_rangerdb_user(self, root_user, db_user, db_password, db_root_password,dryMode):
if self.check_connection('postgres', root_user, db_root_password):
if self.verify_user(root_user, db_root_password, db_user,dryMode):
if dryMode == False:
log("[I] Postgres user " + db_user + " already exists.", "info")
else:
if dryMode == False:
log("[I] User does not exists, Creating user : " + db_user, "info")
get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'postgres')
if is_unix:
query = get_cmd + " -query \"CREATE USER %s WITH LOGIN PASSWORD '%s';\"" %(db_user, db_password)
query_with_masked_pwd = get_cmd + " -query \"CREATE USER %s WITH LOGIN PASSWORD '%s';\"" %(db_user, masked_pwd_string)
jisql_log(query_with_masked_pwd, db_root_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"CREATE USER %s WITH LOGIN PASSWORD '%s';\" -c ;" %(db_user, db_password)
query_with_masked_pwd = get_cmd + " -query \"CREATE USER %s WITH LOGIN PASSWORD '%s';\" -c ;" %(db_user, masked_pwd_string)
jisql_log(query_with_masked_pwd, db_root_password)
ret = subprocess.call(query)
if ret == 0:
if self.verify_user(root_user, db_root_password, db_user,dryMode):
log("[I] Postgres user " + db_user + " created", "info")
else:
log("[E] Postgres user " +db_user+" creation failed..", "error")
sys.exit(1)
else:
log("[E] Postgres user " +db_user+" creation failed..", "error")
sys.exit(1)
else:
logFile("CREATE USER %s WITH LOGIN PASSWORD '%s';" %(db_user, db_password))
def verify_db(self, root_user, db_root_password, db_name,dryMode):
if dryMode == False:
log("[I] Verifying database " + db_name , "info")
get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'postgres')
if is_unix:
query = get_cmd + " -query \"SELECT datname FROM pg_database where datname='%s';\"" %(db_name)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT datname FROM pg_database where datname='%s';\" -c ;" %(db_name)
jisql_log(query, db_root_password)
output = check_output(query)
if output.strip(db_name + " |"):
return True
else:
return False
def create_db(self, root_user, db_root_password, db_name, db_user, db_password,dryMode):
if self.verify_db(root_user, db_root_password, db_name,dryMode):
if dryMode == False:
log("[I] Database "+db_name + " already exists.", "info")
else:
if dryMode == False:
log("[I] Database does not exist, Creating database : " + db_name,"info")
get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'postgres')
if is_unix:
query = get_cmd + " -query \"create database %s with OWNER %s;\"" %(db_name, db_user)
jisql_log(query, db_root_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"create database %s with OWNER %s;\" -c ;" %(db_name, db_user)
jisql_log(query, db_root_password)
ret = subprocess.call(query)
if ret != 0:
log("[E] Database creation failed..","error")
sys.exit(1)
else:
if self.verify_db(root_user, db_root_password, db_name,dryMode):
log("[I] Creating database " + db_name + " succeeded", "info")
return True
else:
log("[E] Database creation failed..","error")
sys.exit(1)
else:
logFile("CREATE DATABASE %s WITH OWNER %s;" %(db_name, db_user))
def grant_xa_db_user(self, root_user, db_name, db_user, db_password, db_root_password , is_revoke,dryMode):
if dryMode == False:
log("[I] Granting privileges TO user '"+db_user+"' on db '"+db_name+"'" , "info")
get_cmd = self.get_jisql_cmd(root_user, db_root_password, db_name)
if is_unix:
query = get_cmd + " -query \"GRANT ALL PRIVILEGES ON DATABASE %s to %s;\"" %(db_name, db_user)
jisql_log(query, db_root_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT ALL PRIVILEGES ON DATABASE %s to %s;\" -c ;" %(db_name, db_user)
jisql_log(query, db_root_password)
ret = subprocess.call(query)
if ret != 0:
log("[E] Granting all privileges on database "+db_name+" to user "+db_user+" failed..", "error")
sys.exit(1)
if is_unix:
query = get_cmd + " -query \"GRANT ALL PRIVILEGES ON SCHEMA public TO %s;\"" %(db_user)
jisql_log(query, db_root_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT ALL PRIVILEGES ON SCHEMA public TO %s;\" -c ;" %(db_user)
jisql_log(query, db_root_password)
ret = subprocess.call(query)
if ret != 0:
log("[E] Granting all privileges on schema public to user "+db_user+" failed..", "error")
sys.exit(1)
if is_unix:
query = get_cmd + " -query \"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\""
jisql_log(query, db_root_password)
output = check_output(query)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\" -c ;"
jisql_log(query, db_root_password)
output = check_output(query)
for each_line in output.split('\n'):
if len(each_line) == 0 : continue
if re.search(' |', each_line):
tablename , value = each_line.strip().split(" |",1)
tablename = tablename.strip()
if is_unix:
query1 = get_cmd + " -query \"GRANT ALL PRIVILEGES ON TABLE %s TO %s;\"" %(tablename,db_user)
jisql_log(query1, db_root_password)
ret = subprocess.call(shlex.split(query1))
if ret != 0:
log("[E] Granting all privileges on tablename "+tablename+" to user "+db_user+" failed..", "error")
sys.exit(1)
elif os_name == "WINDOWS":
query1 = get_cmd + " -query \"GRANT ALL PRIVILEGES ON TABLE %s TO %s;\" -c ;" %(tablename,db_user)
jisql_log(query1, db_root_password)
ret = subprocess.call(query1)
if ret != 0:
log("[E] Granting all privileges on tablename "+tablename+" to user "+db_user+" failed..", "error")
sys.exit(1)
if is_unix:
query = get_cmd + " -query \"SELECT sequence_name FROM information_schema.sequences where sequence_schema='public';\""
jisql_log(query, db_root_password)
output = check_output(query)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT sequence_name FROM information_schema.sequences where sequence_schema='public';\" -c ;"
jisql_log(query, db_root_password)
output = check_output(query)
for each_line in output.split('\n'):
if len(each_line) == 0 : continue
if re.search(' |', each_line):
sequence_name , value = each_line.strip().split(" |",1)
sequence_name = sequence_name.strip()
if is_unix:
query1 = get_cmd + " -query \"GRANT ALL PRIVILEGES ON SEQUENCE %s TO %s;\"" %(sequence_name,db_user)
jisql_log(query1, db_root_password)
ret = subprocess.call(shlex.split(query1))
if ret != 0:
log("[E] Granting all privileges on sequence "+sequence_name+" to user "+db_user+" failed..", "error")
sys.exit(1)
elif os_name == "WINDOWS":
query1 = get_cmd + " -query \"GRANT ALL PRIVILEGES ON SEQUENCE %s TO %s;\" -c ;" %(sequence_name,db_user)
jisql_log(query1, db_root_password)
ret = subprocess.call(query1)
if ret != 0:
log("[E] Granting all privileges on sequence "+sequence_name+" to user "+db_user+" failed..", "error")
sys.exit(1)
log("[I] Granting privileges TO user '"+db_user+"' on db '"+db_name+"' Done" , "info")
else:
logFile("GRANT ALL PRIVILEGES ON DATABASE %s to %s;" %(db_name, db_user))
logFile("GRANT ALL PRIVILEGES ON SCHEMA public TO %s;" %( db_user))
logFile("GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO %s;" %(db_user))
logFile("GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO %s;" %(db_user))
def writeDrymodeCmd(self, xa_db_root_user, xa_db_root_password, db_user, db_password, db_name):
    """Write the POSTGRES dry-run DBA statements to the dry-mode output file.

    Nothing is executed here; the statements are appended (via logFile) for a
    DBA to run manually as the 'postgres' superuser.
    """
    statements = (
        "# Login to POSTGRES Server from a POSTGRES dba user(i.e 'postgres') to execute below sql statements.",
        "CREATE USER %s WITH LOGIN PASSWORD '%s';" % (db_user, db_password),
        "CREATE DATABASE %s WITH OWNER %s;" % (db_name, db_user),
        "# Login to POSTGRES Server from a POSTGRES dba user(i.e 'postgres') on '%s' database to execute below sql statements." % (db_name),
        "GRANT ALL PRIVILEGES ON DATABASE %s to %s;" % (db_name, db_user),
        "GRANT ALL PRIVILEGES ON SCHEMA public TO %s;" % (db_user),
    )
    for statement in statements:
        logFile(statement)
class SqlServerConf(BaseDB):
    """DBA helper for Microsoft SQL Server.

    Builds jisql command lines (Unix vs Windows flag ordering differs) and
    uses them to create the Ranger KMS login, database and database user.
    Relies on module globals defined elsewhere in this script: `is_unix`,
    `os_name`, `RANGER_KMS_HOME`, `masked_pwd_string`, and the helpers
    `log`, `logFile`, `jisql_log`, `check_output`.
    """

    # Constructor
    def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
        self.host = host
        self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
        self.JAVA_BIN = JAVA_BIN

    def get_jisql_cmd(self, user, password, db_name):
        """Return the base jisql command string for connecting to `db_name`
        as `user`/`password` through the MSSQL JDBC driver."""
        #TODO: User array for forming command
        path = RANGER_KMS_HOME
        # Strip any quoting carried over from the install properties.
        self.JAVA_BIN = self.JAVA_BIN.strip("'")
        if is_unix:
            jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -user %s -p '%s' -driver mssql -cstring jdbc:sqlserver://%s\\;databaseName=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path,user, password, self.host,db_name)
        elif os_name == "WINDOWS":
            jisql_cmd = "%s -cp %s;%s\\jisql\\lib\\* org.apache.util.sql.Jisql -user %s -p \"%s\" -driver mssql -cstring jdbc:sqlserver://%s;databaseName=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, user, password, self.host,db_name)
        return jisql_cmd

    def verify_user(self, root_user, db_root_password, db_user, dryMode):
        """Return True when `db_user` exists as a SQL Server login."""
        if dryMode == False:
            log("[I] Verifying user " + db_user , "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'master')
        if is_unix:
            query = get_cmd + " -c \; -query \"select name from sys.sql_logins where name = '%s';\"" %(db_user)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select name from sys.sql_logins where name = '%s';\" -c ;" %(db_user)
        jisql_log(query, db_root_password)
        output = check_output(query)
        # NOTE(review): str.strip(chars) strips a *set* of characters from both
        # ends; this truthiness test treats any residual output as "exists".
        # Looks intentional across this script, but confirm edge cases.
        if output.strip(db_user + " |"):
            return True
        else:
            return False

    def check_connection(self, db_name, db_user, db_password):
        """Run `SELECT 1` against `db_name`; exit the script on failure."""
        log("[I] Checking connection", "info")
        get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
        if is_unix:
            query = get_cmd + " -c \; -query \"SELECT 1;\""
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"SELECT 1;\" -c ;"
        jisql_log(query, db_password)
        output = check_output(query)
        if output.strip('1 |'):
            log("[I] Connection success", "info")
            return True
        else:
            log("[E] Can't establish connection", "error")
            sys.exit(1)

    def create_rangerdb_user(self, root_user, db_user, db_password, db_root_password, dryMode):
        """Create the SQL Server login `db_user` if it does not already exist.

        In dry mode only the CREATE LOGIN statement is written to the output
        file. Exits the script if creation fails.
        """
        if self.check_connection('master', root_user, db_root_password):
            if self.verify_user(root_user, db_root_password, db_user, dryMode):
                if dryMode == False:
                    log("[I] SQL Server user " + db_user + " already exists.", "info")
            else:
                if dryMode == False:
                    get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'master')
                    log("[I] User does not exists, Creating Login user " + db_user, "info")
                    if is_unix:
                        query = get_cmd + " -c \; -query \"CREATE LOGIN %s WITH PASSWORD = '%s';\"" %(db_user,db_password)
                        # Same command with the password masked, for logging only.
                        query_with_masked_pwd = get_cmd + " -c \; -query \"CREATE LOGIN %s WITH PASSWORD = '%s';\"" %(db_user,masked_pwd_string)
                        jisql_log(query_with_masked_pwd, db_root_password)
                        ret = subprocess.call(shlex.split(query))
                    elif os_name == "WINDOWS":
                        query = get_cmd + " -query \"CREATE LOGIN %s WITH PASSWORD = '%s';\" -c ;" %(db_user,db_password)
                        query_with_masked_pwd = get_cmd + " -query \"CREATE LOGIN %s WITH PASSWORD = '%s';\" -c ;" %(db_user,masked_pwd_string)
                        jisql_log(query_with_masked_pwd, db_root_password)
                        ret = subprocess.call(query)
                    if ret == 0:
                        # Re-check: a zero exit code alone does not prove the login exists.
                        if self.verify_user(root_user, db_root_password, db_user, dryMode):
                            log("[I] SQL Server user " + db_user + " created", "info")
                        else:
                            log("[E] SQL Server user " +db_user+" creation failed..", "error")
                            sys.exit(1)
                    else:
                        log("[E] SQL Server user " +db_user+" creation failed..", "error")
                        sys.exit(1)
                else:
                    logFile("CREATE LOGIN %s WITH PASSWORD = '%s';" %(db_user,db_password))

    def verify_db(self, root_user, db_root_password, db_name, dryMode):
        """Return True when database `db_name` exists on the server."""
        if dryMode == False:
            log("[I] Verifying database " + db_name, "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'master')
        if is_unix:
            query = get_cmd + " -c \; -query \"SELECT name from sys.databases where name='%s';\"" %(db_name)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"SELECT name from sys.databases where name='%s';\" -c ;" %(db_name)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_name + " |"):
            return True
        else:
            return False

    def create_db(self, root_user, db_root_password, db_name, db_user, db_password, dryMode):
        """Create database `db_name` (and its db user) if absent.

        Exits the script on failure; in dry mode only writes the statement.
        """
        if self.verify_db(root_user, db_root_password, db_name, dryMode):
            if dryMode == False:
                log("[I] Database " + db_name + " already exists.","info")
        else:
            if dryMode == False:
                log("[I] Database does not exist. Creating database : " + db_name,"info")
                get_cmd = self.get_jisql_cmd(root_user, db_root_password, 'master')
                if is_unix:
                    query = get_cmd + " -c \; -query \"create database %s;\"" %(db_name)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"create database %s;\" -c ;" %(db_name)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(query)
                if ret != 0:
                    log("[E] Database creation failed..","error")
                    sys.exit(1)
                else:
                    if self.verify_db(root_user, db_root_password, db_name, dryMode):
                        self.create_user(root_user, db_name ,db_user, db_password, db_root_password, dryMode)
                        log("[I] Creating database " + db_name + " succeeded", "info")
                        return True
                    else:
                        log("[E] Database creation failed..","error")
                        sys.exit(1)
            else:
                logFile("create database %s;" %(db_name))

    def create_user(self, root_user, db_name, db_user, db_password, db_root_password, dryMode):
        """Map the login `db_user` to a database user inside `db_name`."""
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, db_name)
        if is_unix:
            query = get_cmd + " -c \; -query \"USE %s SELECT name FROM sys.database_principals WHERE name = N'%s';\"" %(db_name, db_user)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"USE %s SELECT name FROM sys.database_principals WHERE name = N'%s';\" -c ;" %(db_name, db_user)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_user + " |"):
            if dryMode == False:
                log("[I] User "+db_user+" exist ","info")
        else:
            if dryMode == False:
                if is_unix:
                    query = get_cmd + " -c \; -query \"USE %s CREATE USER %s for LOGIN %s;\"" %(db_name ,db_user, db_user)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"USE %s CREATE USER %s for LOGIN %s;\" -c ;" %(db_name ,db_user, db_user)
                    jisql_log(query, db_root_password)
                    ret = subprocess.call(query)
                if ret == 0:
                    # Verify the principal is now visible in the database.
                    if is_unix:
                        query = get_cmd + " -c \; -query \"USE %s SELECT name FROM sys.database_principals WHERE name = N'%s';\"" %(db_name ,db_user)
                    elif os_name == "WINDOWS":
                        query = get_cmd + " -query \"USE %s SELECT name FROM sys.database_principals WHERE name = N'%s';\" -c ;" %(db_name ,db_user)
                    jisql_log(query, db_root_password)
                    output = check_output(query)
                    if output.strip(db_user + " |"):
                        log("[I] User "+db_user+" exist ","info")
                    else:
                        log("[E] Database creation failed..","error")
                        sys.exit(1)
                else:
                    log("[E] Database creation failed..","error")
                    sys.exit(1)
            else:
                logFile("USE %s CREATE USER %s for LOGIN %s;" %(db_name ,db_user, db_user))

    def grant_xa_db_user(self, root_user, db_name, db_user, db_password, db_root_password, is_revoke, dryMode):
        """Grant `db_user` the db_owner role on `db_name` (exit on failure).

        `is_revoke` is accepted for interface parity with the other flavors
        but is not used here.
        """
        if dryMode == False:
            log("[I] Granting permission to admin user '" + db_user + "' on db '" + db_name + "'" , "info")
            get_cmd = self.get_jisql_cmd(root_user, db_root_password, db_name)
            if is_unix:
                query = get_cmd + " -c \; -query \" EXEC sp_addrolemember N'db_owner', N'%s';\"" %(db_user)
                jisql_log(query, db_root_password)
                ret = subprocess.call(shlex.split(query))
            elif os_name == "WINDOWS":
                query = get_cmd + " -query \" EXEC sp_addrolemember N'db_owner', N'%s';\" -c ;" %(db_user)
                jisql_log(query, db_root_password)
                ret = subprocess.call(query)
            if ret != 0:
                sys.exit(1)
        else:
            logFile("EXEC sp_addrolemember N'db_owner', N'%s';" %(db_user))

    def writeDrymodeCmd(self, xa_db_root_user, xa_db_root_password, db_user, db_password, db_name):
        """Write the MSSQL dry-run DBA statements to the dry-mode output file."""
        logFile("# Login to MSSQL Server from a MSSQL dba user(i.e 'sa') to execute below sql statements.")
        logFile("CREATE LOGIN %s WITH PASSWORD = '%s';" %(db_user, db_password))
        logFile("create database %s;" %(db_name))
        logFile("# Login to MSSQL Server from a MSSQL dba user(i.e 'sa') on '%s' database to execute below sql statements."%(db_name))
        logFile("USE %s CREATE USER %s for LOGIN %s;" %(db_name, db_user, db_user))
        logFile("EXEC sp_addrolemember N'db_owner', N'%s';" %(db_user))
class SqlAnywhereConf(BaseDB):
    """DBA helper for SAP SQL Anywhere.

    Builds jisql command lines (Unix vs Windows flag ordering differs) and
    uses them to create the Ranger KMS user and database. Relies on module
    globals defined elsewhere in this script: `is_unix`, `os_name`,
    `RANGER_KMS_HOME`, `masked_pwd_string`, and the helpers `log`,
    `logFile`, `jisql_log`, `check_output`.
    """

    # Constructor
    def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
        self.host = host
        self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
        self.JAVA_BIN = JAVA_BIN

    def get_jisql_cmd(self, user, password, db_name):
        """Return the base jisql command string for connecting to `db_name`
        as `user`/`password` through the sapsajdbc4 JDBC driver."""
        #TODO: User array for forming command
        path = RANGER_KMS_HOME
        # Strip any quoting carried over from the install properties.
        self.JAVA_BIN = self.JAVA_BIN.strip("'")
        if is_unix:
            jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -user %s -p '%s' -driver sapsajdbc4 -cstring jdbc:sqlanywhere:database=%s;host=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path,user, password,db_name,self.host)
        elif os_name == "WINDOWS":
            jisql_cmd = "%s -cp %s;%s\\jisql\\lib\\* org.apache.util.sql.Jisql -user %s -p \"%s\" -driver sapsajdbc4 -cstring jdbc:sqlanywhere:database=%s;host=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, user, password,db_name,self.host)
        return jisql_cmd

    def verify_user(self, root_user, db_root_password, db_user, dryMode):
        """Return True when `db_user` exists in syslogins."""
        if dryMode == False:
            log("[I] Verifying user " + db_user , "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
        if is_unix:
            query = get_cmd + " -c \; -query \"select name from syslogins where name = '%s';\"" %(db_user)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select name from syslogins where name = '%s';\" -c ;" %(db_user)
        jisql_log(query, db_root_password)
        output = check_output(query)
        # NOTE(review): str.strip(chars) strips a *set* of characters; any
        # residual output is treated as "user exists" (pattern used throughout
        # this script).
        if output.strip(db_user + " |"):
            return True
        else:
            return False

    def check_connection(self, db_name, db_user, db_password):
        """Run `SELECT 1` against `db_name`; exit the script on failure."""
        log("[I] Checking connection", "info")
        get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
        if is_unix:
            query = get_cmd + " -c \; -query \"SELECT 1;\""
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"SELECT 1;\" -c ;"
        jisql_log(query, db_password)
        output = check_output(query)
        if output.strip('1 |'):
            log("[I] Connection success", "info")
            return True
        else:
            log("[E] Can't establish connection", "error")
            sys.exit(1)

    def create_rangerdb_user(self, root_user, db_user, db_password, db_root_password, dryMode):
        """Create SQL Anywhere user `db_user` if it does not already exist.

        In dry mode only the CREATE USER statement is written to the output
        file. Exits the script if creation fails.
        """
        if self.check_connection('', root_user, db_root_password):
            if self.verify_user(root_user, db_root_password, db_user, dryMode):
                if dryMode == False:
                    log("[I] SQL Anywhere user " + db_user + " already exists.", "info")
            else:
                if dryMode == False:
                    get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
                    log("[I] User does not exists, Creating Login user " + db_user, "info")
                    if is_unix:
                        query = get_cmd + " -c \; -query \"CREATE USER %s IDENTIFIED BY '%s';\"" %(db_user,db_password)
                        # Same command with the password masked, for logging only.
                        query_with_masked_pwd = get_cmd + " -c \; -query \"CREATE USER %s IDENTIFIED BY '%s';\"" %(db_user,masked_pwd_string)
                        jisql_log(query_with_masked_pwd, db_root_password)
                        ret = subprocess.call(shlex.split(query))
                    elif os_name == "WINDOWS":
                        query = get_cmd + " -query \"CREATE USER %s IDENTIFIED BY '%s';\" -c ;" %(db_user,db_password)
                        # BUGFIX: the masked logging command previously used the
                        # Unix flag order (" -c \; -query ...") here; it now
                        # mirrors the Windows command actually executed above.
                        query_with_masked_pwd = get_cmd + " -query \"CREATE USER %s IDENTIFIED BY '%s';\" -c ;" %(db_user,masked_pwd_string)
                        jisql_log(query_with_masked_pwd, db_root_password)
                        ret = subprocess.call(query)
                    if ret == 0:
                        # Re-check: a zero exit code alone does not prove the user exists.
                        if self.verify_user(root_user, db_root_password, db_user, dryMode):
                            log("[I] SQL Anywhere user " + db_user + " created", "info")
                        else:
                            log("[E] SQL Anywhere user " +db_user+" creation failed..", "error")
                            sys.exit(1)
                    else:
                        log("[E] SQL Anywhere user " +db_user+" creation failed..", "error")
                        sys.exit(1)
                else:
                    # BUGFIX: previously wrote "IDENTIFIED BY = '<pwd>'" -- the
                    # stray "=" is not valid SQL Anywhere syntax (compare the
                    # statement emitted by create_user/writeDrymodeCmd).
                    logFile("CREATE USER %s IDENTIFIED BY '%s';" %(db_user,db_password))

    def verify_db(self, root_user, db_root_password, db_name, dryMode):
        """Return True when database `db_name` is known to sa_db_info()."""
        if dryMode == False:
            log("[I] Verifying database " + db_name, "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
        if is_unix:
            query = get_cmd + " -c \; -query \"select alias from sa_db_info() where alias='%s';\"" %(db_name)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select alias from sa_db_info() where alias='%s';\" -c ;" %(db_name)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_name + " |"):
            return True
        else:
            return False

    def create_db(self, root_user, db_root_password, db_name, db_user, db_password, dryMode):
        """Create database `db_name` (owned by `db_user`) if absent, start it,
        and create the db user. Exits the script on failure."""
        if self.verify_db(root_user, db_root_password, db_name, dryMode):
            if dryMode == False:
                log("[I] Database " + db_name + " already exists.","info")
        else:
            if dryMode == False:
                log("[I] Database does not exist. Creating database : " + db_name,"info")
                get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
                if is_unix:
                    query = get_cmd + " -c \; -query \"create database '%s' dba user '%s' dba password '%s' database size 100MB;\"" %(db_name,db_user, db_password)
                    query_with_masked_pwd = get_cmd + " -c \; -query \"create database '%s' dba user '%s' dba password '%s' database size 100MB;\"" %(db_name,db_user, masked_pwd_string)
                    jisql_log(query_with_masked_pwd, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"create database '%s' dba user '%s' dba password '%s' database size 100MB;\" -c ;" %(db_name,db_user, db_password)
                    query_with_masked_pwd = get_cmd + " -query \"create database '%s' dba user '%s' dba password '%s' database size 100MB;\" -c ;" %(db_name,db_user, masked_pwd_string)
                    jisql_log(query_with_masked_pwd, db_root_password)
                    ret = subprocess.call(query)
                if ret != 0:
                    log("[E] Database creation failed..","error")
                    sys.exit(1)
                else:
                    # A freshly created database must be started before it is
                    # visible to sa_db_info() and usable for user creation.
                    self.start_db(root_user, db_root_password, db_name, dryMode)
                    if self.verify_db(root_user, db_root_password, db_name, dryMode):
                        self.create_user(root_user, db_name ,db_user, db_password, db_root_password, dryMode)
                        log("[I] Creating database " + db_name + " succeeded", "info")
                        return True
                    else:
                        log("[E] Database creation failed..","error")
                        sys.exit(1)
            else:
                logFile("create database '%s' dba user '%s' dba password '%s' database size 100MB;" %(db_name,db_user, db_password))

    def create_user(self, root_user, db_name, db_user, db_password, db_root_password, dryMode):
        """Create login `db_user` if it is not present in syslogins."""
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
        if is_unix:
            query = get_cmd + " -c \; -query \"select name from syslogins where name ='%s';\"" %(db_user)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"select name from syslogins where name ='%s';\" -c ;" %(db_user)
        jisql_log(query, db_root_password)
        output = check_output(query)
        if output.strip(db_user + " |"):
            if dryMode == False:
                log("[I] User "+db_user+" exist ","info")
        else:
            if dryMode == False:
                if is_unix:
                    query = get_cmd + " -c \; -query \"CREATE USER %s IDENTIFIED BY '%s';\"" %(db_user, db_password)
                    query_with_masked_pwd = get_cmd + " -c \; -query \"CREATE USER %s IDENTIFIED BY '%s';\"" %(db_user, masked_pwd_string)
                    jisql_log(query_with_masked_pwd, db_root_password)
                    ret = subprocess.call(shlex.split(query))
                elif os_name == "WINDOWS":
                    query = get_cmd + " -query \"CREATE USER %s IDENTIFIED BY '%s';\" -c ;" %(db_user, db_password)
                    query_with_masked_pwd = get_cmd + " -query \"CREATE USER %s IDENTIFIED BY '%s';\" -c ;" %(db_user, masked_pwd_string)
                    jisql_log(query_with_masked_pwd, db_root_password)
                    ret = subprocess.call(query)
                if ret == 0:
                    # Verify the login is now visible.
                    if is_unix:
                        query = get_cmd + " -c \; -query \"select name from syslogins where name ='%s';\"" %(db_user)
                    elif os_name == "WINDOWS":
                        query = get_cmd + " -query \"select name from syslogins where name ='%s';\" -c ;" %(db_user)
                    jisql_log(query, db_root_password)
                    output = check_output(query)
                    if output.strip(db_user + " |"):
                        log("[I] User "+db_user+" exist ","info")
                    else:
                        log("[E] Database creation failed..","error")
                        sys.exit(1)
                else:
                    log("[E] Database creation failed..","error")
                    sys.exit(1)
            else:
                logFile("CREATE USER %s IDENTIFIED BY '%s';" %(db_user, db_password))

    def grant_xa_db_user(self, root_user, db_name, db_user, db_password, db_root_password, is_revoke, dryMode):
        """Grant CONNECT to `db_user` (exit on failure).

        `is_revoke` is accepted for interface parity with the other flavors
        but is not used here.
        """
        if dryMode == False:
            log("[I] Granting permission to user '" + db_user + "' on db '" + db_name + "'" , "info")
            get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
            if is_unix:
                query = get_cmd + " -c \; -query \"GRANT CONNECT to %s IDENTIFIED BY '%s';\"" %(db_user, db_password)
                query_with_masked_pwd = get_cmd + " -c \; -query \"GRANT CONNECT to %s IDENTIFIED BY '%s';\"" %(db_user, masked_pwd_string)
                jisql_log(query_with_masked_pwd, db_root_password)
                ret = subprocess.call(shlex.split(query))
            elif os_name == "WINDOWS":
                query = get_cmd + " -query \"GRANT CONNECT to %s IDENTIFIED BY '%s';\" -c ;" %(db_user, db_password)
                query_with_masked_pwd = get_cmd + " -query \"GRANT CONNECT to %s IDENTIFIED BY '%s';\" -c ;" %(db_user, masked_pwd_string)
                jisql_log(query_with_masked_pwd, db_root_password)
                ret = subprocess.call(query)
            if ret != 0:
                sys.exit(1)
        else:
            logFile("GRANT CONNECT to %s IDENTIFIED BY '%s';" %(db_user, db_password))

    def start_db(self, root_user, db_root_password, db_name, dryMode):
        """Issue `start database ... autostop off` so the database stays up.

        Best-effort: the command's output is read but its status is not
        checked (matches the original behavior).
        """
        if dryMode == False:
            log("[I] Starting database " + db_name, "info")
        get_cmd = self.get_jisql_cmd(root_user, db_root_password, '')
        if is_unix:
            query = get_cmd + " -c \; -query \"start database '%s' autostop off;\"" %(db_name)
        elif os_name == "WINDOWS":
            query = get_cmd + " -query \"start database '%s' autostop off;\" -c ;" %(db_name)
        jisql_log(query, db_root_password)
        output = check_output(query)

    def writeDrymodeCmd(self, xa_db_root_user, xa_db_root_password, db_user, db_password, db_name):
        """Write the SQL Anywhere dry-run DBA statements to the output file."""
        logFile("# Login to SQL Anywhere Server from a SQLA dba user(i.e 'dba') to execute below sql statements.")
        logFile("CREATE USER %s IDENTIFIED BY '%s';" %(db_user, db_password))
        logFile("create database '%s' dba user '%s' dba password '%s' database size 100MB;" %(db_name, db_user ,db_password))
        logFile("start database '%s' autostop off;" %(db_name))
        logFile("GRANT CONNECT to %s IDENTIFIED BY '%s';" %(db_user, db_password))
def main(argv):
    """Entry point for the Ranger KMS DBA setup script.

    Modes (selected via `argv`):
      -q        : quiet mode - read every setting from the install
                  properties (via populate_global_dict() / globalDict).
      -d <file> : dry-run mode - write the DBA SQL statements to <file>
                  instead of executing anything.
      (neither) : interactive mode - prompt for every value.

    Relies on module globals defined elsewhere in this script: `os_name`,
    `RANGER_KMS_HOME`, `globalDict`, the `*Conf` classes, and the helpers
    `log`, `logFile`, `populate_global_dict`, `password_validation`.
    Exits the process (sys.exit(1)) on any unrecoverable condition.
    """
    FORMAT = '%(asctime)-15s %(message)s'
    logging.basicConfig(format=FORMAT, level=logging.DEBUG)
    DBA_MODE = 'TRUE'
    quiteMode = False
    dryMode = False
    is_revoke = False

    # ---- argument parsing ----
    if len(argv) > 1:
        for i in range(len(argv)):
            if str(argv[i]) == "-q":
                quiteMode = True
                populate_global_dict()
            if str(argv[i]) == "-d":
                index = i + 1
                try:
                    dba_sql_file = str(argv[index])
                    if dba_sql_file == "":
                        log("[E] Invalid input! Provide file path to write DBA scripts:","error")
                        sys.exit(1)
                except IndexError:
                    log("[E] Invalid input! Provide file path to write DBA scripts:","error")
                    sys.exit(1)
                if not dba_sql_file == "":
                    if not os.path.exists(dba_sql_file):
                        log("[I] Creating File:"+dba_sql_file,"info")
                        open(dba_sql_file, 'w').close()
                    else:
                        log("[I] File "+dba_sql_file+ " is available.","info")
                    if os.path.isfile(dba_sql_file):
                        dryMode = True
                        globalDict["dryMode"] = True
                        globalDict["dryModeOutputFile"] = dba_sql_file
                    else:
                        log("[E] Invalid file Name! Unable to find file:"+dba_sql_file,"error")
                        sys.exit(1)

    log("[I] Running DBA setup script. QuiteMode:" + str(quiteMode),"info")

    # ---- locate the java executable ----
    if quiteMode:
        # BUGFIX: os.environ['JAVA_HOME'] raised KeyError when the variable
        # was absent entirely; .get() lets the friendly error path run.
        if os.environ.get('JAVA_HOME', '') == "":
            log("[E] ---------- JAVA_HOME environment property not defined, aborting installation. ----------", "error")
            sys.exit(1)
        else:
            JAVA_BIN = os.path.join(os.environ['JAVA_HOME'],'bin','java')
            if os_name == "WINDOWS":
                JAVA_BIN = JAVA_BIN + '.exe'
        if os.path.isfile(JAVA_BIN):
            pass
        else:
            # Fall back to the JAVA_BIN configured in the install properties.
            JAVA_BIN = globalDict['JAVA_BIN']
            if os.path.isfile(JAVA_BIN):
                pass
            else:
                log("[E] ---------- JAVA Not Found, aborting installation. ----------", "error")
                sys.exit(1)
        log("[I] Using Java:" + str(JAVA_BIN),"info")
    else:
        JAVA_BIN = ''
        if not dryMode:
            # BUGFIX: same KeyError hazard as above.
            if os.environ.get('JAVA_HOME', '') == "":
                log("[E] ---------- JAVA_HOME environment property not defined, aborting installation. ----------", "error")
                sys.exit(1)
            JAVA_BIN = os.path.join(os.environ['JAVA_HOME'],'bin','java')
            if os_name == "WINDOWS":
                JAVA_BIN = JAVA_BIN + '.exe'
            if os.path.isfile(JAVA_BIN):
                pass
            else:
                # Keep prompting until a real executable path is supplied.
                while os.path.isfile(JAVA_BIN) == False:
                    log("Enter java executable path: :","info")
                    JAVA_BIN = input()
            log("[I] Using Java:" + str(JAVA_BIN),"info")

    # ---- DB flavor ----
    if quiteMode:
        XA_DB_FLAVOR = globalDict['DB_FLAVOR']
    else:
        XA_DB_FLAVOR = ''
        while XA_DB_FLAVOR == "":
            log("Enter db flavour{MYSQL|ORACLE|POSTGRES|MSSQL|SQLA} :","info")
            XA_DB_FLAVOR = input()
    XA_DB_FLAVOR = XA_DB_FLAVOR.upper()
    log("[I] DB FLAVOR:" + str(XA_DB_FLAVOR),"info")

    # ---- JDBC connector jar ----
    if quiteMode:
        CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
    else:
        CONNECTOR_JAR = ''
        if not dryMode:
            # NOTE(review): "SQLA" is missing from this list, so interactive
            # (non-dry) SQL Anywhere setup aborts here - confirm intended.
            if XA_DB_FLAVOR == "MYSQL" or XA_DB_FLAVOR == "ORACLE" or XA_DB_FLAVOR == "POSTGRES" or XA_DB_FLAVOR == "MSSQL":
                log("Enter JDBC connector file for :"+XA_DB_FLAVOR,"info")
                CONNECTOR_JAR = input()
                while os.path.isfile(CONNECTOR_JAR) == False:
                    log("JDBC connector file "+CONNECTOR_JAR+" does not exist, Please enter connector path :","error")
                    CONNECTOR_JAR = input()
            else:
                log("[E] ---------- NO SUCH SUPPORTED DB FLAVOUR.. ----------", "error")
                sys.exit(1)

    # ---- DB host ----
    if quiteMode:
        xa_db_host = globalDict['db_host']
        log("[I] DB Host:" + str(xa_db_host),"info")
    else:
        if dryMode:
            xa_db_host = '127.0.0.1'
        else:
            xa_db_host = ''
            while xa_db_host == "":
                log("Enter DB Host :","info")
                xa_db_host = input()

    # ---- DB root credentials ----
    if quiteMode:
        xa_db_root_user = globalDict['db_root_user']
        xa_db_root_password = globalDict['db_root_password']
    else:
        if dryMode:
            # Placeholders only; dry mode never connects.
            xa_db_root_user = 'db_root_user'
            xa_db_root_password = '*****'
        else:
            xa_db_root_user = ''
            while xa_db_root_user == "":
                log("Enter db root user:","info")
                xa_db_root_user = input()
            log("Enter db root password:","info")
            xa_db_root_password = getpass.getpass("Enter db root password:")

    # ---- DB name ----
    if quiteMode:
        db_name = globalDict['db_name']
    else:
        if dryMode:
            db_name = 'ranger_kms_db'
        else:
            db_name = ''
            while db_name == "":
                log("Enter DB Name :","info")
                db_name = input()

    # ---- DB user ----
    if quiteMode:
        db_user = globalDict['db_user']
    else:
        if dryMode:
            db_user = 'ranger_kms_user'
        else:
            db_user = ''
            while db_user == "":
                log("Enter db user name:","info")
                db_user = input()

    # ---- DB user password ----
    if quiteMode:
        db_password = globalDict['db_password']
    else:
        if dryMode:
            db_password = '*****'
        else:
            db_password = ''
            while db_password == "":
                log("Enter db user password:","info")
                db_password = getpass.getpass("Enter db user password:")

    # ---- per-flavor core schema files ----
    mysql_core_file = os.path.join('db','mysql','xa_core_db.sql')
    oracle_core_file = os.path.join('db','oracle','xa_core_db_oracle.sql')
    postgres_core_file = os.path.join('db','postgres','xa_core_db_postgres.sql')
    sqlserver_core_file = os.path.join('db','sqlserver','xa_core_db_sqlserver.sql')
    sqlanywhere_core_file = os.path.join('db','sqlanywhere','xa_core_db_sqlanywhere.sql')
    x_db_version = 'x_db_version_h'
    x_user = 'x_portal_user'

    # ---- optional SSL settings (MySQL / Postgres only) ----
    db_ssl_enabled = 'false'
    db_ssl_required = 'false'
    db_ssl_verifyServerCertificate = 'false'
    db_ssl_auth_type = '2-way'
    javax_net_ssl_keyStore = ''
    javax_net_ssl_keyStorePassword = ''
    javax_net_ssl_trustStore = ''
    javax_net_ssl_trustStorePassword = ''
    if XA_DB_FLAVOR == "MYSQL" or XA_DB_FLAVOR == "POSTGRES":
        if 'db_ssl_enabled' in globalDict:
            db_ssl_enabled = globalDict['db_ssl_enabled'].lower()
            if db_ssl_enabled == 'true':
                if 'db_ssl_required' in globalDict:
                    db_ssl_required = globalDict['db_ssl_required'].lower()
                if 'db_ssl_verifyServerCertificate' in globalDict:
                    db_ssl_verifyServerCertificate = globalDict['db_ssl_verifyServerCertificate'].lower()
                if 'db_ssl_auth_type' in globalDict:
                    db_ssl_auth_type = globalDict['db_ssl_auth_type'].lower()
                if db_ssl_verifyServerCertificate == 'true':
                    if 'javax_net_ssl_trustStore' in globalDict:
                        javax_net_ssl_trustStore = globalDict['javax_net_ssl_trustStore']
                    if 'javax_net_ssl_trustStorePassword' in globalDict:
                        javax_net_ssl_trustStorePassword = globalDict['javax_net_ssl_trustStorePassword']
                    if not os.path.exists(javax_net_ssl_trustStore):
                        log("[E] Invalid file Name! Unable to find truststore file:"+javax_net_ssl_trustStore,"error")
                        sys.exit(1)
                    if javax_net_ssl_trustStorePassword is None or javax_net_ssl_trustStorePassword == "":
                        log("[E] Invalid ssl truststore password!","error")
                        sys.exit(1)
                    if db_ssl_auth_type == '2-way':
                        if 'javax_net_ssl_keyStore' in globalDict:
                            javax_net_ssl_keyStore = globalDict['javax_net_ssl_keyStore']
                        if 'javax_net_ssl_keyStorePassword' in globalDict:
                            javax_net_ssl_keyStorePassword = globalDict['javax_net_ssl_keyStorePassword']
                        if not os.path.exists(javax_net_ssl_keyStore):
                            log("[E] Invalid file Name! Unable to find keystore file:"+javax_net_ssl_keyStore,"error")
                            sys.exit(1)
                        if javax_net_ssl_keyStorePassword is None or javax_net_ssl_keyStorePassword == "":
                            log("[E] Invalid ssl keystore password!","error")
                            sys.exit(1)

    # ---- instantiate the flavor-specific DBA helper ----
    if XA_DB_FLAVOR == "MYSQL":
        MYSQL_CONNECTOR_JAR = CONNECTOR_JAR
        xa_sqlObj = MysqlConf(xa_db_host, MYSQL_CONNECTOR_JAR, JAVA_BIN,db_ssl_enabled,db_ssl_required,db_ssl_verifyServerCertificate,javax_net_ssl_keyStore,javax_net_ssl_keyStorePassword,javax_net_ssl_trustStore,javax_net_ssl_trustStorePassword,db_ssl_auth_type)
        xa_db_core_file = os.path.join(RANGER_KMS_HOME,mysql_core_file)
    elif XA_DB_FLAVOR == "ORACLE":
        ORACLE_CONNECTOR_JAR = CONNECTOR_JAR
        if xa_db_root_user.upper() == "SYS":
            xa_db_root_user = xa_db_root_user + " AS SYSDBA"
        xa_sqlObj = OracleConf(xa_db_host, ORACLE_CONNECTOR_JAR, JAVA_BIN)
        xa_db_core_file = os.path.join(RANGER_KMS_HOME,oracle_core_file)
    elif XA_DB_FLAVOR == "POSTGRES":
        # Postgres folds unquoted identifiers to lower case.
        db_user = db_user.lower()
        db_name = db_name.lower()
        POSTGRES_CONNECTOR_JAR = CONNECTOR_JAR
        xa_sqlObj = PostgresConf(xa_db_host, POSTGRES_CONNECTOR_JAR, JAVA_BIN,db_ssl_enabled,db_ssl_required,db_ssl_verifyServerCertificate,javax_net_ssl_keyStore,javax_net_ssl_keyStorePassword,javax_net_ssl_trustStore,javax_net_ssl_trustStorePassword,db_ssl_auth_type)
        xa_db_core_file = os.path.join(RANGER_KMS_HOME,postgres_core_file)
    elif XA_DB_FLAVOR == "MSSQL":
        SQLSERVER_CONNECTOR_JAR = CONNECTOR_JAR
        xa_sqlObj = SqlServerConf(xa_db_host, SQLSERVER_CONNECTOR_JAR, JAVA_BIN)
        xa_db_core_file = os.path.join(RANGER_KMS_HOME,sqlserver_core_file)
    elif XA_DB_FLAVOR == "SQLA":
        if not os_name == "WINDOWS":
            # BUGFIX: .get() avoids a KeyError when LD_LIBRARY_PATH is unset.
            if os.environ.get('LD_LIBRARY_PATH', '') == "":
                log("[E] ---------- LD_LIBRARY_PATH environment property not defined, aborting installation. ----------", "error")
                sys.exit(1)
        SQLANYWHERE_CONNECTOR_JAR = CONNECTOR_JAR
        xa_sqlObj = SqlAnywhereConf(xa_db_host, SQLANYWHERE_CONNECTOR_JAR, JAVA_BIN)
        xa_db_core_file = os.path.join(RANGER_KMS_HOME,sqlanywhere_core_file)
    else:
        log("[E] ---------- NO SUCH SUPPORTED DB FLAVOUR.. ----------", "error")
        sys.exit(1)

    if not dryMode:
        log("[I] ---------- Verifing DB root password ---------- ","info")
        password_validation(xa_db_root_password,"DBA root")
        log("[I] ---------- Verifing Ranger KMS db user password ---------- ","info")
        password_validation(db_password,"KMS")

    # Methods Begin
    if DBA_MODE == "TRUE":
        if dryMode == True:
            log("[I] Logging DBA Script in file:"+str(globalDict["dryModeOutputFile"]),"info")
            logFile("===============================================\n")
            xa_sqlObj.writeDrymodeCmd(xa_db_root_user, xa_db_root_password, db_user, db_password, db_name)
            logFile("===============================================\n")
        if dryMode == False:
            log("[I] ---------- Creating Ranger KMS db user ---------- ","info")
            xa_sqlObj.create_rangerdb_user(xa_db_root_user, db_user, db_password, xa_db_root_password,dryMode)
            log("[I] ---------- Creating Ranger KMS database ----------","info")
            xa_sqlObj.create_db(xa_db_root_user, xa_db_root_password, db_name, db_user, db_password,dryMode)
            log("[I] ---------- Granting permission to Ranger KMS db user ----------","info")
            xa_sqlObj.grant_xa_db_user(xa_db_root_user, db_name, db_user, db_password, xa_db_root_password, is_revoke,dryMode)
    # Ranger KMS DB Host AND Ranger Audit DB Host are Different OR Same
    log("[I] ---------- Ranger KMS DB and User Creation Process Completed.. ---------- ","info")
# Script entry point: run the DBA setup with the command-line arguments.
# NOTE(review): there is no `if __name__ == "__main__":` guard, so importing
# this module runs the whole setup - confirm it is only executed directly.
main(sys.argv)
| 45.858099
| 293
| 0.677954
| 10,168
| 68,512
| 4.313238
| 0.042093
| 0.036938
| 0.057459
| 0.031192
| 0.853228
| 0.826299
| 0.800762
| 0.787537
| 0.772967
| 0.763732
| 0
| 0.00291
| 0.17232
| 68,512
| 1,493
| 294
| 45.888814
| 0.7705
| 0.019121
| 0
| 0.677491
| 0
| 0.019188
| 0.238904
| 0.02345
| 0
| 0
| 0
| 0.00067
| 0
| 1
| 0.043542
| false
| 0.214022
| 0.009594
| 0
| 0.085609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8a6cdc29e32a516f047e91b8bb4252a432d6916a
| 14,655
|
py
|
Python
|
ingestion/src/metadata/ingestion/sink/elasticsearch_constants.py
|
mosiac1/OpenMetadata
|
21c14d257c6ae7ed2aad2a9ccff2c3a3f1594681
|
[
"Apache-2.0"
] | 1
|
2022-01-06T11:12:01.000Z
|
2022-01-06T11:12:01.000Z
|
ingestion/src/metadata/ingestion/sink/elasticsearch_constants.py
|
mosiac1/OpenMetadata
|
21c14d257c6ae7ed2aad2a9ccff2c3a3f1594681
|
[
"Apache-2.0"
] | 6
|
2022-01-05T10:24:56.000Z
|
2022-01-06T09:57:07.000Z
|
ingestion/src/metadata/ingestion/sink/elasticsearch_constants.py
|
mosiac1/OpenMetadata
|
21c14d257c6ae7ed2aad2a9ccff2c3a3f1594681
|
[
"Apache-2.0"
] | 5
|
2022-01-05T11:48:39.000Z
|
2022-01-06T06:45:25.000Z
|
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Elasticsearch index mapping bodies used by the metadata ingestion sink.
#
# These were previously maintained as seven near-identical hand-written JSON
# string literals.  The shared sub-mappings (the embedded "owner" reference,
# the usage-statistics fields and the common keyword facets) are now defined
# once as Python dicts and serialized with json.dumps, which removes the
# duplication and guarantees every constant is well-formed JSON.  Each public
# constant keeps its original name and remains a JSON string.
import json
import textwrap  # kept: previously used here; retained for any external users

# --- reusable field snippets -------------------------------------------------
_TEXT = {"type": "text"}
_KEYWORD = {"type": "keyword"}
_BOOLEAN = {"type": "boolean"}
_LONG = {"type": "long"}
_COMPLETION = {"type": "completion"}
# Timestamps are indexed as seconds since the Unix epoch.
_EPOCH_DATE = {"type": "date", "format": "epoch_second"}

# Sub-mapping for the owning user/team reference embedded in entity documents.
_OWNER = {
    "properties": {
        "id": {
            "type": "keyword",
            # UUIDs are at most 36 characters.
            "fields": {"keyword": {"type": "keyword", "ignore_above": 36}},
        },
        "type": _TEXT,
        "name": {
            "type": "keyword",
            "fields": {"keyword": {"type": "keyword", "ignore_above": 256}},
        },
        "fullyQualifiedName": _TEXT,
        "description": _TEXT,
        "deleted": _BOOLEAN,
        "href": _TEXT,
    }
}

# Usage-statistics fields shared by the table and dashboard indexes.
_USAGE_STATS = {
    "monthly_stats": _LONG,
    "monthly_percentile_rank": _LONG,
    "weekly_stats": _LONG,
    "weekly_percentile_rank": _LONG,
    "daily_percentile_rank": _LONG,
    "daily_stats": _LONG,
}

# Keyword facets common to service-backed entities (tables, topics, ...).
_SERVICE_FACETS = {
    "tier": _KEYWORD,
    "tags": _KEYWORD,
    "service": _KEYWORD,
    "service_type": _KEYWORD,
    "service_category": _KEYWORD,
    "entity_type": _KEYWORD,
}


def _index_mapping(properties: dict) -> str:
    """Serialize *properties* into a complete index-mapping JSON body."""
    return json.dumps({"mappings": {"properties": properties}}, indent=4)


TABLE_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "owner": _OWNER,
        "deleted": _BOOLEAN,
        "followers": _KEYWORD,
        "fqdn": _KEYWORD,
        "last_updated_timestamp": _EPOCH_DATE,
        "description": _TEXT,
        **_SERVICE_FACETS,
        "column_names": _TEXT,
        "column_descriptions": _TEXT,
        "database": _KEYWORD,
        "database_schema": _KEYWORD,
        "suggest": _COMPLETION,
        **_USAGE_STATS,
    }
)

TOPIC_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "owner": _OWNER,
        "deleted": _BOOLEAN,
        "followers": _KEYWORD,
        "fqdn": _KEYWORD,
        "last_updated_timestamp": _EPOCH_DATE,
        "description": _TEXT,
        **_SERVICE_FACETS,
        "suggest": _COMPLETION,
    }
)

DASHBOARD_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "owner": _OWNER,
        "deleted": _BOOLEAN,
        "fqdn": _KEYWORD,
        "followers": _KEYWORD,
        "last_updated_timestamp": _EPOCH_DATE,
        "description": _TEXT,
        "chart_names": _TEXT,
        "chart_descriptions": _TEXT,
        **_SERVICE_FACETS,
        "suggest": _COMPLETION,
        **_USAGE_STATS,
    }
)

PIPELINE_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "fqdn": _KEYWORD,
        "owner": _OWNER,
        "deleted": _BOOLEAN,
        "followers": _KEYWORD,
        "last_updated_timestamp": _EPOCH_DATE,
        "description": _TEXT,
        "task_names": _TEXT,
        "task_descriptions": _TEXT,
        **_SERVICE_FACETS,
        "suggest": _COMPLETION,
    }
)

USER_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "email": _TEXT,
        "last_updated_timestamp": _EPOCH_DATE,
        "entity_type": _KEYWORD,
        "teams": _KEYWORD,
        "roles": _KEYWORD,
        "deleted": _BOOLEAN,
        "suggest": _COMPLETION,
    }
)

TEAM_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "last_updated_timestamp": _EPOCH_DATE,
        "entity_type": _KEYWORD,
        "deleted": _BOOLEAN,
        "users": _KEYWORD,
        "owns": _KEYWORD,
        "suggest": _COMPLETION,
    }
)

GLOSSARY_TERM_ELASTICSEARCH_INDEX_MAPPING = _index_mapping(
    {
        "name": _TEXT,
        "display_name": _TEXT,
        "fqdn": _KEYWORD,
        "owner": _OWNER,
        "last_updated_timestamp": _EPOCH_DATE,
        "description": _TEXT,
        "glossary_name": _KEYWORD,
        "glossary_id": _KEYWORD,
        "deleted": _BOOLEAN,
        "status": _KEYWORD,
        "tags": _KEYWORD,
        "entity_type": _KEYWORD,
        "suggest": _COMPLETION,
    }
)
| 24.547739
| 75
| 0.287683
| 732
| 14,655
| 5.620219
| 0.178962
| 0.176471
| 0.040836
| 0.058337
| 0.820126
| 0.790228
| 0.790228
| 0.790228
| 0.790228
| 0.777346
| 0
| 0.005298
| 0.574957
| 14,655
| 596
| 76
| 24.588926
| 0.655161
| 0.037462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a92665e3694e87a7781af1cb7573455d7a826ba
| 27,479
|
py
|
Python
|
airflow/providers/google/cloud/operators/datastore.py
|
hubert-pietron/airflow
|
3f9073c46940ef1f25a9f46b447d9cf84435c3ed
|
[
"Apache-2.0"
] | 8,092
|
2016-04-27T20:32:29.000Z
|
2019-01-05T07:39:33.000Z
|
airflow/providers/google/cloud/operators/datastore.py
|
hubert-pietron/airflow
|
3f9073c46940ef1f25a9f46b447d9cf84435c3ed
|
[
"Apache-2.0"
] | 2,961
|
2016-05-05T07:16:16.000Z
|
2019-01-05T08:47:59.000Z
|
airflow/providers/google/cloud/operators/datastore.py
|
hubert-pietron/airflow
|
3f9073c46940ef1f25a9f46b447d9cf84435c3ed
|
[
"Apache-2.0"
] | 3,546
|
2016-05-04T20:33:16.000Z
|
2019-01-05T05:14:26.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""This module contains Google Datastore operators."""
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.datastore import DatastoreHook
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.cloud.links.datastore import (
CloudDatastoreEntitiesLink,
CloudDatastoreImportExportLink,
)
from airflow.providers.google.common.links.storage import StorageLink
if TYPE_CHECKING:
from airflow.utils.context import Context
class CloudDatastoreExportEntitiesOperator(BaseOperator):
    """Export entities from Google Cloud Datastore to a Cloud Storage bucket.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreExportEntitiesOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/export-import-entities

    :param bucket: name of the Cloud Storage bucket that receives the backup
    :param namespace: optional namespace path inside the bucket to back up to;
        created in GCS if it does not already exist
    :param datastore_conn_id: name of the Datastore connection id to use
    :param cloud_storage_conn_id: name of the Cloud Storage connection id used
        to force-write the backup
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled
    :param entity_filter: description of which project data is included in the
        export, see
        https://cloud.google.com/datastore/docs/reference/rest/Shared.Types/EntityFilter
    :param labels: client-assigned labels for cloud storage
    :param polling_interval_in_seconds: seconds to wait between polls of the
        export operation's status
    :param overwrite_existing: when True and the bucket + namespace is not
        empty, it is emptied before the export, overwriting existing backups
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated)
    """

    template_fields: Sequence[str] = (
        'bucket',
        'namespace',
        'entity_filter',
        'labels',
        'impersonation_chain',
    )
    operator_extra_links = (StorageLink(),)

    def __init__(
        self,
        *,
        bucket: str,
        namespace: Optional[str] = None,
        datastore_conn_id: str = 'google_cloud_default',
        cloud_storage_conn_id: str = 'google_cloud_default',
        delegate_to: Optional[str] = None,
        entity_filter: Optional[dict] = None,
        labels: Optional[dict] = None,
        polling_interval_in_seconds: int = 10,
        overwrite_existing: bool = False,
        project_id: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.bucket = bucket
        self.namespace = namespace
        self.datastore_conn_id = datastore_conn_id
        self.cloud_storage_conn_id = cloud_storage_conn_id
        self.delegate_to = delegate_to
        self.entity_filter = entity_filter
        self.labels = labels
        self.polling_interval_in_seconds = polling_interval_in_seconds
        self.overwrite_existing = overwrite_existing
        self.project_id = project_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> dict:
        self.log.info('Exporting data to Cloud Storage bucket %s', self.bucket)
        if self.overwrite_existing and self.namespace:
            # Empty the target namespace first so the new backup fully replaces it.
            gcs = GCSHook(self.cloud_storage_conn_id, impersonation_chain=self.impersonation_chain)
            for blob_name in gcs.list(self.bucket, prefix=self.namespace):
                gcs.delete(self.bucket, blob_name)
        datastore = DatastoreHook(
            gcp_conn_id=self.datastore_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        operation = datastore.export_to_storage_bucket(
            bucket=self.bucket,
            namespace=self.namespace,
            entity_filter=self.entity_filter,
            labels=self.labels,
            project_id=self.project_id,
        )
        # Block until the long-running export operation finishes.
        result = datastore.poll_operation_until_done(
            operation['name'], self.polling_interval_in_seconds
        )
        if result['metadata']['common']['state'] != 'SUCCESSFUL':
            raise AirflowException(f'Operation failed: result={result}')
        # The export folder is the 4th path component of the output URL.
        export_folder = result['response']['outputUrl'].split('/')[3]
        StorageLink.persist(
            context=context,
            task_instance=self,
            uri=f"{self.bucket}/{export_folder}",
        )
        return result
class CloudDatastoreImportEntitiesOperator(BaseOperator):
    """
    Import entities from Cloud Storage to Google Cloud Datastore

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreImportEntitiesOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/export-import-entities

    :param bucket: container in Cloud Storage to store data
    :param file: path of the backup metadata file in the specified Cloud Storage bucket.
        It should have the extension .overall_export_metadata
    :param namespace: optional namespace of the backup metadata file in
        the specified Cloud Storage bucket.
    :param entity_filter: description of what data from the project is included in
        the export, refer to
        https://cloud.google.com/datastore/docs/reference/rest/Shared.Types/EntityFilter
    :param labels: client-assigned labels for cloud storage
    :param datastore_conn_id: the name of the connection id to use
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param polling_interval_in_seconds: number of seconds to wait before polling for
        execution status again
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        'bucket',
        'file',
        'namespace',
        'entity_filter',
        'labels',
        'impersonation_chain',
    )
    operator_extra_links = (CloudDatastoreImportExportLink(),)

    def __init__(
        self,
        *,
        bucket: str,
        file: str,
        namespace: Optional[str] = None,
        entity_filter: Optional[dict] = None,
        labels: Optional[dict] = None,
        datastore_conn_id: str = 'google_cloud_default',
        delegate_to: Optional[str] = None,
        polling_interval_in_seconds: float = 10,
        project_id: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.datastore_conn_id = datastore_conn_id
        self.delegate_to = delegate_to
        self.bucket = bucket
        self.file = file
        self.namespace = namespace
        self.entity_filter = entity_filter
        self.labels = labels
        self.polling_interval_in_seconds = polling_interval_in_seconds
        self.project_id = project_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context'):
        self.log.info('Importing data from Cloud Storage bucket %s', self.bucket)
        # Keyword arguments for consistency with the export operator (these
        # were previously passed positionally).
        ds_hook = DatastoreHook(
            gcp_conn_id=self.datastore_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        result = ds_hook.import_from_storage_bucket(
            bucket=self.bucket,
            file=self.file,
            namespace=self.namespace,
            entity_filter=self.entity_filter,
            labels=self.labels,
            project_id=self.project_id,
        )
        # Block until the long-running import operation finishes.
        operation_name = result['name']
        result = ds_hook.poll_operation_until_done(operation_name, self.polling_interval_in_seconds)
        state = result['metadata']['common']['state']
        if state != 'SUCCESSFUL':
            raise AirflowException(f'Operation failed: result={result}')
        CloudDatastoreImportExportLink.persist(context=context, task_instance=self)
        return result
class CloudDatastoreAllocateIdsOperator(BaseOperator):
    """Allocate IDs for a list of incomplete Datastore keys; return the full keys.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreAllocateIdsOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/rest/v1/projects/allocateIds

    :param partial_keys: a list of partial keys.
    :param project_id: Google Cloud project ID against which to make the request.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("partial_keys", "impersonation_chain")
    operator_extra_links = (CloudDatastoreEntitiesLink(),)

    def __init__(
        self,
        *,
        partial_keys: List,
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.partial_keys = partial_keys
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> list:
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        allocated_keys = datastore.allocate_ids(
            partial_keys=self.partial_keys,
            project_id=self.project_id,
        )
        CloudDatastoreEntitiesLink.persist(context=context, task_instance=self)
        return allocated_keys
class CloudDatastoreBeginTransactionOperator(BaseOperator):
    """Begin a new Datastore transaction and return its transaction handle.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreBeginTransactionOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction

    :param transaction_options: Options for a new transaction.
    :param project_id: Google Cloud project ID against which to make the request.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("transaction_options", "impersonation_chain")

    def __init__(
        self,
        *,
        transaction_options: Dict[str, Any],
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.transaction_options = transaction_options
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> str:
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return datastore.begin_transaction(
            transaction_options=self.transaction_options,
            project_id=self.project_id,
        )
class CloudDatastoreCommitOperator(BaseOperator):
    """Commit a Datastore transaction, optionally creating, deleting or modifying entities.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreCommitOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/rest/v1/projects/commit

    :param body: the body of the commit request.
    :param project_id: Google Cloud project ID against which to make the request.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("body", "impersonation_chain")
    operator_extra_links = (CloudDatastoreEntitiesLink(),)

    def __init__(
        self,
        *,
        body: Dict[str, Any],
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.body = body
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> dict:
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        commit_response = datastore.commit(
            body=self.body,
            project_id=self.project_id,
        )
        CloudDatastoreEntitiesLink.persist(context=context, task_instance=self)
        return commit_response
class CloudDatastoreRollbackOperator(BaseOperator):
    """Roll back a Datastore transaction.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreRollbackOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/rest/v1/projects/rollback

    :param transaction: the transaction to roll back.
    :param project_id: Google Cloud project ID against which to make the request.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("transaction", "impersonation_chain")

    def __init__(
        self,
        *,
        transaction: str,
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.transaction = transaction
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> None:
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        datastore.rollback(
            transaction=self.transaction,
            project_id=self.project_id,
        )
class CloudDatastoreRunQueryOperator(BaseOperator):
    """Run a Datastore query for entities and return the batch of query results.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreRunQueryOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/rest/v1/projects/runQuery

    :param body: the body of the query request.
    :param project_id: Google Cloud project ID against which to make the request.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("body", "impersonation_chain")

    def __init__(
        self,
        *,
        body: Dict[str, Any],
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.body = body
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> dict:
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return datastore.run_query(
            body=self.body,
            project_id=self.project_id,
        )
class CloudDatastoreGetOperationOperator(BaseOperator):
    """Get the latest state of a long-running Datastore operation.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreGetOperationOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/get

    :param name: the name of the operation resource.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("name", "impersonation_chain")

    def __init__(
        self,
        *,
        name: str,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.name = name
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context'):
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return datastore.get_operation(name=self.name)
class CloudDatastoreDeleteOperationOperator(BaseOperator):
    """Delete a long-running Datastore operation.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDatastoreDeleteOperationOperator`

    .. seealso::
        https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/delete

    :param name: the name of the operation resource.
    :param delegate_to: account to impersonate using domain-wide delegation of
        authority, if any; the service account making the request must have
        domain-wide delegation enabled.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: optional service account to impersonate using
        short-term credentials, or a chained list of accounts required to get
        the access_token of the last account, which is impersonated in the
        request (templated).
    """

    template_fields: Sequence[str] = ("name", "impersonation_chain")

    def __init__(
        self,
        *,
        name: str,
        delegate_to: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.name = name
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: 'Context') -> None:
        datastore = DatastoreHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        datastore.delete_operation(name=self.name)
| 41.136228
| 104
| 0.684559
| 3,386
| 27,479
| 5.412581
| 0.094211
| 0.06384
| 0.021116
| 0.024554
| 0.80406
| 0.786326
| 0.761227
| 0.75637
| 0.747967
| 0.734708
| 0
| 0.000773
| 0.246625
| 27,479
| 667
| 105
| 41.197901
| 0.884504
| 0.498162
| 0
| 0.708092
| 0
| 0
| 0.065315
| 0.004738
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052023
| false
| 0
| 0.040462
| 0
| 0.176301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a97b264f913918b2943ca44affcd6219fc217d3
| 1,147
|
py
|
Python
|
api/models.py
|
albertwohletz/gmorganizer
|
dd4d9c22e2a9a150e1085a29a432c7b3aeaf8434
|
[
"MIT"
] | null | null | null |
api/models.py
|
albertwohletz/gmorganizer
|
dd4d9c22e2a9a150e1085a29a432c7b3aeaf8434
|
[
"MIT"
] | null | null | null |
api/models.py
|
albertwohletz/gmorganizer
|
dd4d9c22e2a9a150e1085a29a432c7b3aeaf8434
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class event(models.Model):
    """A top-level campaign event owned by a user."""

    # Fix: on_delete is mandatory on Django >= 2.0; CASCADE matches the
    # implicit default of older Django versions, so behavior is unchanged.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    text = models.CharField(max_length=5000)
    hidden = models.BooleanField(default=False)
class subevent(models.Model):
    """A sub-event nested under an event, owned by a user."""

    # Fix: on_delete is mandatory on Django >= 2.0; CASCADE matches the
    # implicit default of older Django versions, so behavior is unchanged.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    event = models.ForeignKey(event, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    text = models.CharField(max_length=5000)
    hidden = models.BooleanField(default=False)
class npc(models.Model):
    """A non-player character owned by a user."""

    # Fix: on_delete is mandatory on Django >= 2.0; CASCADE matches the
    # implicit default of older Django versions, so behavior is unchanged.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    text = models.CharField(max_length=5000)
    hidden = models.BooleanField(default=False)
class enemy(models.Model):
    """An enemy character owned by a user."""

    # Fix: on_delete is mandatory on Django >= 2.0; CASCADE matches the
    # implicit default of older Django versions, so behavior is unchanged.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    text = models.CharField(max_length=5000)
    hidden = models.BooleanField(default=False)
class pc(models.Model):
    """A player character owned by a user."""

    # Fix: on_delete is mandatory on Django >= 2.0; CASCADE matches the
    # implicit default of older Django versions, so behavior is unchanged.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    text = models.CharField(max_length=5000)
    hidden = models.BooleanField(default=False)
| 32.771429
| 47
| 0.727986
| 145
| 1,147
| 5.689655
| 0.206897
| 0.181818
| 0.218182
| 0.290909
| 0.836364
| 0.836364
| 0.793939
| 0.793939
| 0.793939
| 0.793939
| 0
| 0.036458
| 0.163034
| 1,147
| 35
| 48
| 32.771429
| 0.822917
| 0.020924
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
8ac06b267559a68bd8f89449860c2d3ba5f8d182
| 21,388
|
py
|
Python
|
tests/cli/test_datasource_new_helpers.py
|
victorcouste/great_expectations
|
9ee46d83feb87e13c769e2ae35b899b3f18d73a4
|
[
"Apache-2.0"
] | 2
|
2022-01-28T15:51:32.000Z
|
2022-02-02T05:07:58.000Z
|
tests/cli/test_datasource_new_helpers.py
|
victorcouste/great_expectations
|
9ee46d83feb87e13c769e2ae35b899b3f18d73a4
|
[
"Apache-2.0"
] | null | null | null |
tests/cli/test_datasource_new_helpers.py
|
victorcouste/great_expectations
|
9ee46d83feb87e13c769e2ae35b899b3f18d73a4
|
[
"Apache-2.0"
] | 1
|
2021-10-08T01:24:50.000Z
|
2021-10-08T01:24:50.000Z
|
from unittest import mock
import pytest
import great_expectations.exceptions as ge_exceptions
from great_expectations import DataContext
from great_expectations.cli.datasource import (
BigqueryCredentialYamlHelper,
ConnectionStringCredentialYamlHelper,
MySQLCredentialYamlHelper,
PandasYamlHelper,
PostgresCredentialYamlHelper,
RedshiftCredentialYamlHelper,
SnowflakeAuthMethod,
SnowflakeCredentialYamlHelper,
SparkYamlHelper,
SQLCredentialYamlHelper,
check_if_datasource_name_exists,
)
from great_expectations.datasource.types import DatasourceTypes
def test_SQLCredentialYamlHelper_defaults(empty_data_context):
    """Default SQL helper: generic credential placeholders, no drivername in yaml."""
    helper = SQLCredentialYamlHelper(usage_stats_payload={"foo": "bar"})
    expected_credentials_snippet = '''\
host = "YOUR_HOST"
port = "YOUR_PORT"
username = "YOUR_USERNAME"
password = "YOUR_PASSWORD"
database = "YOUR_DATABASE"'''
    assert helper.credentials_snippet() == expected_credentials_snippet
    # Exact-match on the generated yaml snippet (an f-string template literal).
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
port: '{port}'
username: {username}
password: {password}
database: {database}
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # The notebook renderer must surface the same credentials snippet.
    renderer = helper.get_notebook_renderer(empty_data_context)
    assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
def test_SQLCredentialYamlHelper_driver(empty_data_context):
    """SQL helper with an explicit driver adds ``drivername`` to the yaml snippet.

    Fix: removed a leftover debug ``print(helper.yaml_snippet())`` that leaked
    noise into the test output.
    """
    helper = SQLCredentialYamlHelper(usage_stats_payload={"foo": "bar"}, driver="stuff")
    expected_credentials_snippet = '''\
host = "YOUR_HOST"
port = "YOUR_PORT"
username = "YOUR_USERNAME"
password = "YOUR_PASSWORD"
database = "YOUR_DATABASE"'''
    assert helper.credentials_snippet() == expected_credentials_snippet
    # Exact-match on the generated yaml snippet (an f-string template literal).
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
port: '{port}'
username: {username}
password: {password}
database: {database}
drivername: stuff
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # The notebook renderer must surface the same credentials snippet.
    renderer = helper.get_notebook_renderer(empty_data_context)
    assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
@mock.patch(
    "great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_MySQLCredentialYamlHelper(mock_emit, empty_data_context_stats_enabled):
    """MySQL helper: port defaults to 3306, yaml uses drivername mysql+pymysql."""
    helper = MySQLCredentialYamlHelper("my_datasource")
    expected_credentials_snippet = '''\
host = "YOUR_HOST"
port = "3306"
username = "YOUR_USERNAME"
password = "YOUR_PASSWORD"
database = "YOUR_DATABASE"'''
    assert helper.credentials_snippet() == expected_credentials_snippet
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
port: '{port}'
username: {username}
password: {password}
database: {database}
drivername: mysql+pymysql
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # Choosing the backend must emit exactly one usage-stats event.
    helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
    assert mock_emit.call_count == 1
    assert mock_emit.call_args_list == [
        mock.call(
            {
                "event": "cli.new_ds_choice",
                "event_payload": {
                    "type": "sqlalchemy",
                    "db": "MySQL",
                    "api_version": "v3",
                },
                "success": True,
            }
        ),
    ]
    renderer = helper.get_notebook_renderer(empty_data_context_stats_enabled)
    assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
@mock.patch(
    "great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_PostgresCredentialYamlHelper(mock_emit, empty_data_context_stats_enabled):
    """Postgres helper: port defaults to 5432, database to 'postgres', drivername postgresql."""
    helper = PostgresCredentialYamlHelper("my_datasource")
    expected_credentials_snippet = '''\
host = "YOUR_HOST"
port = "5432"
username = "YOUR_USERNAME"
password = "YOUR_PASSWORD"
database = "postgres"'''
    assert helper.credentials_snippet() == expected_credentials_snippet
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
port: '{port}'
username: {username}
password: {password}
database: {database}
drivername: postgresql
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # Choosing the backend must emit exactly one usage-stats event.
    helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
    assert mock_emit.call_count == 1
    assert mock_emit.call_args_list == [
        mock.call(
            {
                "event": "cli.new_ds_choice",
                "event_payload": {
                    "type": "sqlalchemy",
                    "db": "Postgres",
                    "api_version": "v3",
                },
                "success": True,
            }
        ),
    ]
    renderer = helper.get_notebook_renderer(empty_data_context_stats_enabled)
    assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
@mock.patch(
    "great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_RedshiftCredentialYamlHelper(mock_emit, empty_data_context_stats_enabled):
    """Redshift helper: port 5439, sslmode:prefer query arg, drivername postgresql+psycopg2."""
    helper = RedshiftCredentialYamlHelper("my_datasource")
    expected_credentials_snippet = '''\
host = "YOUR_HOST"
port = "5439"
username = "YOUR_USERNAME"
password = "YOUR_PASSWORD"
database = "YOUR_DATABASE"'''
    assert helper.credentials_snippet() == expected_credentials_snippet
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
port: '{port}'
username: {username}
password: {password}
database: {database}
query:
sslmode: prefer
drivername: postgresql+psycopg2
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # Choosing the backend must emit exactly one usage-stats event.
    helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
    assert mock_emit.call_count == 1
    assert mock_emit.call_args_list == [
        mock.call(
            {
                "event": "cli.new_ds_choice",
                "event_payload": {
                    "type": "sqlalchemy",
                    "db": "Redshift",
                    "api_version": "v3",
                },
                "success": True,
            }
        ),
    ]
    renderer = helper.get_notebook_renderer(empty_data_context_stats_enabled)
    assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
@mock.patch("click.prompt")
@mock.patch(
"great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_SnowflakeCredentialYamlHelper_password_auth(
mock_emit, mock_prompt, empty_data_context_stats_enabled
):
helper = SnowflakeCredentialYamlHelper("my_datasource")
mock_prompt.side_effect = ["1"]
helper.prompt()
assert helper.auth_method == SnowflakeAuthMethod.USER_AND_PASSWORD
expected_credentials_snippet = '''\
host = "YOUR_HOST" # The account name (include region -- ex 'ABCD.us-east-1')
username = "YOUR_USERNAME"
database = "" # The database name (optional -- leave blank for none)
schema = "" # The schema name (optional -- leave blank for none)
warehouse = "" # The warehouse name (optional -- leave blank for none)
role = "" # The role name (optional -- leave blank for none)
password = "YOUR_PASSWORD"'''
assert helper.credentials_snippet() == expected_credentials_snippet
assert (
helper.yaml_snippet()
== '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
username: {username}
database: {database}
query:
schema: {schema}
warehouse: {warehouse}
role: {role}
password: {password}
drivername: snowflake
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
)
helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
_snowflake_usage_stats_assertions(mock_emit)
renderer = helper.get_notebook_renderer(empty_data_context_stats_enabled)
assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
@mock.patch("click.prompt")
@mock.patch(
"great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_SnowflakeCredentialYamlHelper_sso_auth(
mock_emit, mock_prompt, empty_data_context_stats_enabled
):
helper = SnowflakeCredentialYamlHelper("my_datasource")
mock_prompt.side_effect = ["2"]
helper.prompt()
assert helper.auth_method == SnowflakeAuthMethod.SSO
expected_credentials_snippet = """\
host = "YOUR_HOST" # The account name (include region -- ex 'ABCD.us-east-1')
username = "YOUR_USERNAME"
database = "" # The database name (optional -- leave blank for none)
schema = "" # The schema name (optional -- leave blank for none)
warehouse = "" # The warehouse name (optional -- leave blank for none)
role = "" # The role name (optional -- leave blank for none)
authenticator_url = "externalbrowser" # A valid okta URL or 'externalbrowser' used to connect through SSO"""
assert helper.credentials_snippet() == expected_credentials_snippet
assert (
helper.yaml_snippet()
== '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
username: {username}
database: {database}
query:
schema: {schema}
warehouse: {warehouse}
role: {role}
connect_args:
authenticator: {authenticator_url}
drivername: snowflake
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
)
helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
_snowflake_usage_stats_assertions(mock_emit)
renderer = helper.get_notebook_renderer(empty_data_context_stats_enabled)
assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
@mock.patch("click.prompt")
@mock.patch(
"great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_SnowflakeCredentialYamlHelper_key_pair_auth(
mock_emit, mock_prompt, empty_data_context_stats_enabled
):
helper = SnowflakeCredentialYamlHelper("my_datasource")
mock_prompt.side_effect = ["3"]
helper.prompt()
assert helper.auth_method == SnowflakeAuthMethod.KEY_PAIR
expected_credentials_snippet = """host = "YOUR_HOST" # The account name (include region -- ex 'ABCD.us-east-1')
username = "YOUR_USERNAME"
database = "" # The database name (optional -- leave blank for none)
schema = "" # The schema name (optional -- leave blank for none)
warehouse = "" # The warehouse name (optional -- leave blank for none)
role = "" # The role name (optional -- leave blank for none)
private_key_path = "YOUR_KEY_PATH" # Path to the private key used for authentication
private_key_passphrase = "" # Passphrase for the private key used for authentication (optional -- leave blank for none)"""
assert helper.credentials_snippet() == expected_credentials_snippet
assert (
helper.yaml_snippet()
== '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
credentials:
host: {host}
username: {username}
database: {database}
query:
schema: {schema}
warehouse: {warehouse}
role: {role}
private_key_path: {private_key_path}
private_key_passphrase: {private_key_passphrase}
drivername: snowflake
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
)
helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
_snowflake_usage_stats_assertions(mock_emit)
renderer = helper.get_notebook_renderer(empty_data_context_stats_enabled)
assert renderer.sql_credentials_code_snippet == expected_credentials_snippet
def _snowflake_usage_stats_assertions(mock_emit):
    """Shared check: exactly one Snowflake new-datasource usage-stats event was emitted."""
    expected_payload = {
        "event": "cli.new_ds_choice",
        "event_payload": {
            "type": "sqlalchemy",
            "db": "Snowflake",
            "api_version": "v3",
        },
        "success": True,
    }
    assert mock_emit.call_count == 1
    assert mock_emit.call_args_list == [mock.call(expected_payload)]
@mock.patch(
    "great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_BigqueryCredentialYamlHelper(mock_emit, empty_data_context_stats_enabled):
    """BigQuery helper: single connection_string credential and yaml snippet."""
    helper = BigqueryCredentialYamlHelper("my_datasource")
    # NOTE(review): the stray '"""' at the end of the reference comment below
    # looks accidental -- confirm against the helper's actual output.
    assert (
        helper.credentials_snippet()
        == '''\
# The SQLAlchemy url/connection string for the BigQuery connection
# (reference: https://github.com/mxmzdlv/pybigquery#connection-string-parameters)"""
connection_string = "YOUR_BIGQUERY_CONNECTION_STRING"'''
    )
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
connection_string: {connection_string}
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # Choosing the backend must emit exactly one usage-stats event.
    helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
    assert mock_emit.call_count == 1
    assert mock_emit.call_args_list == [
        mock.call(
            {
                "event": "cli.new_ds_choice",
                "event_payload": {
                    "type": "sqlalchemy",
                    "db": "BigQuery",
                    "api_version": "v3",
                },
                "success": True,
            }
        ),
    ]
@mock.patch(
    "great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_ConnectionStringCredentialYamlHelper(
    mock_emit, empty_data_context_stats_enabled
):
    """Generic connection-string helper: snippet, yaml, library check, and usage stats."""
    helper = ConnectionStringCredentialYamlHelper("my_datasource")
    assert (
        helper.credentials_snippet()
        == '''\
# The url/connection string for the sqlalchemy connection
# (reference: https://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls)
connection_string = "YOUR_CONNECTION_STRING"'''
    )
    assert (
        helper.yaml_snippet()
        == '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SqlAlchemyExecutionEngine
connection_string: {connection_string}
data_connectors:
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
default_inferred_data_connector_name:
class_name: InferredAssetSqlDataConnector
name: whole_table"""'''
    )
    # No extra driver libraries are required for the generic helper.
    assert helper.verify_libraries_installed() is True
    # Choosing the backend must emit exactly one usage-stats event ("other" db).
    helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
    assert mock_emit.call_count == 1
    assert mock_emit.call_args_list == [
        mock.call(
            {
                "event": "cli.new_ds_choice",
                "event_payload": {
                    "type": "sqlalchemy",
                    "db": "other",
                    "api_version": "v3",
                },
                "success": True,
            }
        ),
    ]
@mock.patch("click.prompt")
@mock.patch(
"great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_PandasYamlHelper(mock_emit, mock_prompt, empty_data_context_stats_enabled):
helper = PandasYamlHelper(context_root_dir="foo", datasource_name="bar")
assert helper.context_root_dir == "foo"
assert helper.datasource_name == "bar"
assert helper.datasource_type == DatasourceTypes.PANDAS
assert helper.class_name == "PandasExecutionEngine"
assert helper.verify_libraries_installed() is True
helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
assert mock_emit.call_count == 1
assert mock_emit.call_args_list == [
mock.call(
{
"event": "cli.new_ds_choice",
"event_payload": {"type": "pandas", "api_version": "v3"},
"success": True,
}
),
]
assert helper.base_path == ""
mock_prompt.side_effect = ["path/to/data"]
helper.prompt()
assert helper.base_path == "../path/to/data"
assert (
helper.yaml_snippet()
== '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: PandasExecutionEngine
data_connectors:
default_inferred_data_connector_name:
class_name: InferredAssetFilesystemDataConnector
base_directory: ../path/to/data
default_regex:
group_names:
- data_asset_name
pattern: (.*)
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
"""'''
)
@mock.patch("click.prompt")
@mock.patch(
"great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
def test_SparkYamlHelper(mock_emit, mock_prompt, empty_data_context_stats_enabled):
helper = SparkYamlHelper(context_root_dir="foo", datasource_name="bar")
assert helper.context_root_dir == "foo"
assert helper.datasource_name == "bar"
assert helper.datasource_type == DatasourceTypes.SPARK
assert helper.class_name == "SparkDFExecutionEngine"
helper.send_backend_choice_usage_message(empty_data_context_stats_enabled)
assert mock_emit.call_count == 1
assert mock_emit.call_args_list == [
mock.call(
{
"event": "cli.new_ds_choice",
"event_payload": {"type": "spark", "api_version": "v3"},
"success": True,
}
),
]
assert helper.base_path == ""
mock_prompt.side_effect = ["path/to/data"]
helper.prompt()
assert helper.base_path == "../path/to/data"
assert (
helper.yaml_snippet()
== '''f"""
name: {datasource_name}
class_name: Datasource
execution_engine:
class_name: SparkDFExecutionEngine
data_connectors:
default_inferred_data_connector_name:
class_name: InferredAssetFilesystemDataConnector
base_directory: ../path/to/data
default_regex:
group_names:
- data_asset_name
pattern: (.*)
default_runtime_data_connector_name:
class_name: RuntimeDataConnector
batch_identifiers:
- default_identifier_name
"""'''
)
def test_check_if_datasource_name_exists(
    titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled,
):
    """check_if_datasource_name_exists: True for a known name, False otherwise."""
    context: DataContext = titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled
    # Sanity-check the fixture: exactly one datasource named "my_datasource".
    datasource_names = [d["name"] for d in context.list_datasources()]
    assert datasource_names == [
        "my_datasource",
    ]
    assert len(context.list_datasources()) == 1
    # A present name is found.
    assert check_if_datasource_name_exists(
        context=context, datasource_name="my_datasource"
    )
    # An absent name yields exactly False (not just falsy).
    missing = check_if_datasource_name_exists(
        context=context, datasource_name="nonexistent_datasource"
    )
    assert missing is False
| 31.780089
| 126
| 0.706938
| 2,200
| 21,388
| 6.499091
| 0.097273
| 0.031473
| 0.032732
| 0.038187
| 0.867744
| 0.856064
| 0.84872
| 0.826339
| 0.811652
| 0.800322
| 0
| 0.002574
| 0.200673
| 21,388
| 672
| 127
| 31.827381
| 0.833762
| 0.000935
| 0
| 0.745098
| 0
| 0.006536
| 0.490662
| 0.133302
| 0
| 0
| 0
| 0
| 0.116013
| 1
| 0.022876
| false
| 0.026144
| 0.009804
| 0
| 0.03268
| 0.001634
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a1539743b18556e43a23cba2f243aadcd9920f4
| 1,725
|
py
|
Python
|
settings.py
|
ullesdiykanal/zeldaclone
|
f4d9c0fb1ecb0ef659901c03662c4d7724adea05
|
[
"CC0-1.0"
] | null | null | null |
settings.py
|
ullesdiykanal/zeldaclone
|
f4d9c0fb1ecb0ef659901c03662c4d7724adea05
|
[
"CC0-1.0"
] | null | null | null |
settings.py
|
ullesdiykanal/zeldaclone
|
f4d9c0fb1ecb0ef659901c03662c4d7724adea05
|
[
"CC0-1.0"
] | null | null | null |
# Display configuration.
WIDTH = 1280
HEIGTH = 720  # NOTE(review): name is a typo for HEIGHT; kept for compatibility with callers.
FPS = 60
TILESIZE = 64

# The world map, one character per tile: 'x' appears to be a wall/blocked
# tile and 'p' the player start -- presumed from the layout; confirm against
# the map loader.
_MAP_ROWS = [
    "xxxxxxxxxxxxxxxxxxxx",
    "x                  x",
    "x p                x",
    "x  x     xxxxx     x",
    "x  x         x     x",
    "x  x         x     x",
    "x  x         x     x",
    "x  x         x     x",
    "x  x         x     x",
    "x  x         x     x",
    "x  x         x     x",
    "x  x         xxx   x",
    "x      x x         x",
    "x     xxxxx        x",
    "x      xxx         x",
    "x       x          x",
    "x                  x",
    "x                  x",
    "x                  x",
    "xxxxxxxxxxxxxxxxxxxx",
]
# Expand to the list-of-single-character-lists shape the game code expects.
WORLD_MAP = [list(row) for row in _MAP_ROWS]
| 63.888889
| 82
| 0.089275
| 122
| 1,725
| 1.254098
| 0.098361
| 1.424837
| 2.098039
| 2.745098
| 0.72549
| 0.72549
| 0.72549
| 0.72549
| 0.72549
| 0.72549
| 0
| 0.007852
| 0.187826
| 1,725
| 27
| 83
| 63.888889
| 0.101356
| 0
| 0
| 0.5
| 0
| 0
| 0.23175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0a2057c7f4109d860d8bcb922fc01007625723ea
| 219
|
py
|
Python
|
tools/creator/libcreator/utils/__init__.py
|
duaneellissd/ejtag
|
bd6ebd1d2ec826667f44d48836ce90368b8d4b6f
|
[
"BSD-3-Clause"
] | null | null | null |
tools/creator/libcreator/utils/__init__.py
|
duaneellissd/ejtag
|
bd6ebd1d2ec826667f44d48836ce90368b8d4b6f
|
[
"BSD-3-Clause"
] | null | null | null |
tools/creator/libcreator/utils/__init__.py
|
duaneellissd/ejtag
|
bd6ebd1d2ec826667f44d48836ce90368b8d4b6f
|
[
"BSD-3-Clause"
] | null | null | null |
from .frozenclass import FrozenClass, FrozenException
from .filelocation import FileLocation
from .filelocation import FileLocation_cmdline
from .filelocation import FileLocation_builtin
from .platform_vars import *
| 24.333333
| 53
| 0.858447
| 23
| 219
| 8.043478
| 0.391304
| 0.259459
| 0.356757
| 0.551351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 219
| 8
| 54
| 27.375
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0a3164ac0363be3e6d08a38959296c8d67dd43a8
| 83
|
py
|
Python
|
test/fixtures/__init__.py
|
citation-file-format/citeme
|
33761da7504f4744f9d708c67e09b9a788381a47
|
[
"Apache-2.0"
] | 3
|
2018-07-18T03:34:09.000Z
|
2019-04-15T08:58:31.000Z
|
test/fixtures/__init__.py
|
NLeSC/citeme
|
33761da7504f4744f9d708c67e09b9a788381a47
|
[
"Apache-2.0"
] | 2
|
2020-09-03T16:58:55.000Z
|
2021-05-15T20:55:40.000Z
|
test/fixtures/__init__.py
|
citation-file-format/citeme
|
33761da7504f4744f9d708c67e09b9a788381a47
|
[
"Apache-2.0"
] | null | null | null |
from fixtures.my_func import my_func
from fixtures.my_func_bad import my_func_bad
| 20.75
| 44
| 0.86747
| 16
| 83
| 4.125
| 0.375
| 0.363636
| 0.424242
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108434
| 83
| 3
| 45
| 27.666667
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6a60aac42a004aea5487c5ad08cf76ea790a037b
| 9,773
|
py
|
Python
|
tests/test_marginal_crf.py
|
kajyuuen/pytorch-partial-crf
|
bb368346544cb6241e425a8b4e1a3baee324ef0d
|
[
"MIT"
] | 22
|
2019-09-23T16:07:44.000Z
|
2022-03-17T12:16:18.000Z
|
tests/test_marginal_crf.py
|
kajyuuen/pytorch-partial-crf
|
bb368346544cb6241e425a8b4e1a3baee324ef0d
|
[
"MIT"
] | 15
|
2019-10-20T08:23:55.000Z
|
2022-03-18T00:53:33.000Z
|
tests/test_marginal_crf.py
|
kajyuuen/pytorch-partial-crf
|
bb368346544cb6241e425a8b4e1a3baee324ef0d
|
[
"MIT"
] | 3
|
2019-12-10T12:44:41.000Z
|
2020-09-03T03:16:46.000Z
|
import itertools
import random
import math
from pytest import approx
import torch
from pytorch_partial_crf import MarginalCRF
# Pin global RNG state so the randomized test fixtures are reproducible.
SEED = 4738
random.seed(SEED)
torch.manual_seed(SEED)
def manually_score(transitions_from_start, transitions_to_end, transitions, emissions, tags, tag_proba=None):
    """Compute a CRF path score by hand: boundary + transition + emission terms.

    NOTE(review): ``tag_proba`` is accepted but never used; kept only so the
    signature stays compatible with existing callers.
    """
    # Scores for entering the first tag and leaving the last tag.
    score = transitions_from_start[tags[0]] + transitions_to_end[tags[-1]]
    # Pairwise transition scores along consecutive tags of the path.
    for prev_tag, cur_tag in zip(tags, tags[1:]):
        score += transitions[prev_tag, cur_tag]
    # Per-position emission scores.
    for emission, cur_tag in zip(emissions, tags):
        score += emission[cur_tag]
    return score
class TestAsCRF:
    """MarginalCRF with one-hot marginal tags must behave like a plain CRF."""

    def setup(self):
        # Two sequences of length 4 over 6 tags.
        self.emissions = torch.Tensor([
            [[1, 0, 0, .1, .4, .6], [0, .5, .7, 0, .1, .3], [.1, .5, 2, .7, 1, 0], [.4, 1, .9, .2, .9, 0]],
            [[0, 0, 1, .7, .4, .3], [0, .1, .4, .8, 0, .2], [0, .5, .4, .9, 0, 1], [.3, .1, .7, 0, 0, .6]]
        ])
        self.tags = torch.LongTensor([
            [1, 3, 4, 5],
            [3, 0, 2, 5]
        ])
        # One-hot encoding of self.tags, so the marginal CRF reduces to a CRF.
        self.marginal_tags = torch.Tensor([
            [
                [0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
            ],
            [
                [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
            ],
        ])
        self.transitions = torch.Tensor([
            [0.1, 0.2, 0.3, 0.4, 0.5, 0.6],
            [0.5, 0.2, 0.9, 0.7, 0.4, 0.1],
            [-0.8, 7.3, -0.2, 3.7, 0.3, 1.0],
            [0.2, 0.4, 0.6, -0.8, 1.0, -1.2],
            [-1.0, 0, -1.0, 0.1, 1.0, 0.1],
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
        ])
        self.transitions_from_start = torch.Tensor([0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
        self.transitions_to_end = torch.Tensor([-0.1, -0.2, 0.3, -0.4, -0.4, -0.5])
        self.num_tags = 6
        # Overwrite the model's learned parameters with the fixed fixtures above.
        self.marginal_crf = MarginalCRF(self.num_tags)
        self.marginal_crf.transitions = torch.nn.Parameter(self.transitions)
        self.marginal_crf.start_transitions = torch.nn.Parameter(self.transitions_from_start)
        self.marginal_crf.end_transitions = torch.nn.Parameter(self.transitions_to_end)

    def test_forward_without_mask(self):
        """Model NLL equals a brute-force log-sum-exp over all 6^4 paths."""
        log_likelihood = self.marginal_crf(self.emissions, self.marginal_tags)
        manual_log_likelihood = 0.0
        for emission, tag in zip(self.emissions, self.tags):
            # Numerator: score of the gold path.
            gold_score = manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tag.detach())
            # Denominator: enumerate every possible tag sequence.
            forward_scores = [manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tags_j)
                              for tags_j in itertools.product(range(self.num_tags), repeat=4)]
            denominator = math.log(sum(math.exp(forward_score) for forward_score in forward_scores))
            manual_log_likelihood += denominator - gold_score
        assert manual_log_likelihood.item() == approx(log_likelihood.item())

    def test_forward_with_mask(self):
        """Masked positions are excluded from both numerator and denominator."""
        mask = torch.ByteTensor([
            [1, 1, 1, 1],
            [1, 1, 0, 0]
        ])
        log_likelihood = self.marginal_crf(self.emissions, self.marginal_tags, mask)
        manual_log_likelihood = 0.0
        for emission, tag, mask_i in zip(self.emissions, self.tags, mask):
            # Truncate each sequence to its unmasked prefix length.
            sequence_length = torch.sum(mask_i.detach())
            emission = emission.data[:sequence_length]
            tag = tag.data[:sequence_length]
            gold_score = manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tag.detach())
            forward_scores = [manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tags_j)
                              for tags_j in itertools.product(range(self.num_tags), repeat=sequence_length)]
            denominator = math.log(sum(math.exp(forward_score) for forward_score in forward_scores))
            manual_log_likelihood += denominator - gold_score
        assert manual_log_likelihood.item() == approx(log_likelihood.item())
class TestAsPartialCRF:
def setup(self):
    """Build fixtures where some positions are unlabeled (tag == -1)."""
    # Three sequences of length 4 over 6 tags.
    self.emissions = torch.Tensor([
        [[1, 0, 0, .1, .4, .6], [0, .5, .7, 0, .1, .3], [.1, .5, 2, .7, 1, 0], [.4, 1, .9, .2, .9, 0]],
        [[0, 0, 1, .7, .4, .3], [0, .1, .4, .8, 0, .2], [0, .5, .4, .9, 0, 1], [.3, .1, .7, 0, 0, .6]],
        [[0, 0, 1, .7, .4, .3], [0, .1, .4, .8, 0, .2], [0, .5, .4, .9, 0, 1], [.3, .1, .7, 0, 0, .6]]
    ])
    # -1 marks an unobserved tag; the third sequence is fully unobserved.
    self.tags = torch.LongTensor([
        [-1, 3, 4, -1],
        [1, 1, -1, 1],
        [-1, -1, -1, -1]
    ])
    # Marginal encoding: all-ones row where the tag is unobserved,
    # one-hot row where it is known.
    self.marginal_tags = torch.Tensor([
        [
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
            [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
        ],
        [
            [0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
            [0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
        ],
        [
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
        ],
    ])
    self.transitions = torch.Tensor([
        [0.1, 0.2, 0.3, 0.4, 0.5, 0.6],
        [0.5, 0.2, 0.9, 0.7, 0.4, 0.1],
        [-0.8, 7.3, -0.2, 3.7, 0.3, 1.0],
        [0.2, 0.4, 0.6, -0.8, 1.0, -1.2],
        [-1.0, 0, -1.0, 0.1, 1.0, 0.1],
        [1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
    ])
    self.transitions_from_start = torch.Tensor([0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
    self.transitions_to_end = torch.Tensor([-0.1, -0.2, 0.3, -0.4, -0.4, -0.5])
    self.num_tags = 6
    # Overwrite the model's learned parameters with the fixed fixtures above.
    self.marginal_crf = MarginalCRF(self.num_tags)
    self.marginal_crf.transitions = torch.nn.Parameter(self.transitions)
    self.marginal_crf.start_transitions = torch.nn.Parameter(self.transitions_from_start)
    self.marginal_crf.end_transitions = torch.nn.Parameter(self.transitions_to_end)
def test_forward_without_mask(self):
log_likelihood = self.marginal_crf(self.emissions, self.marginal_tags)
manual_log_likelihood = 0.0
# Calculate available path score
for emission, tag in zip(self.emissions, self.tags):
gold_scores = []
for tags_j in itertools.product(range(self.num_tags), repeat=4):
flag = True
for ti, tj in zip(tag.tolist(), tags_j):
if ti == -1:
continue
else:
if ti != tj:
flag = False
break
if flag:
gold_scores.append(manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tags_j))
forward_scores = [manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tags_j)
for tags_j in itertools.product(range(self.num_tags), repeat=4)]
numerator = math.log(sum(math.exp(forward_score) for forward_score in gold_scores))
denominator = math.log(sum(math.exp(forward_score) for forward_score in forward_scores))
manual_log_likelihood += denominator - numerator
assert manual_log_likelihood == approx(log_likelihood.item())
def test_forward_with_mask(self):
mask = torch.LongTensor([
[1, 1, 1, 1],
[1, 1, 0, 0],
[1, 1, 1, 0]
])
log_likelihood = self.marginal_crf(self.emissions, self.marginal_tags, mask)
manual_log_likelihood = 0.0
# Calculate available path score
for emission, tag, mask_i in zip(self.emissions, self.tags, mask):
sequence_length = torch.sum(mask_i.detach())
emission = emission.data[:sequence_length]
tag = tag.data[:sequence_length]
gold_scores = []
for tags_j in itertools.product(range(self.num_tags), repeat=sequence_length):
flag = True
for ti, tj in zip(tag.tolist(), tags_j):
if ti == -1:
continue
else:
if ti != tj:
flag = False
break
if flag:
gold_scores.append(manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tags_j))
forward_scores = [manually_score(self.transitions_from_start, self.transitions_to_end, self.transitions, emission.detach(), tags_j)
for tags_j in itertools.product(range(self.num_tags), repeat=sequence_length)]
numerator = math.log(sum(math.exp(forward_score) for forward_score in gold_scores))
denominator = math.log(sum(math.exp(forward_score) for forward_score in forward_scores))
manual_log_likelihood += denominator - numerator
assert manual_log_likelihood == approx(log_likelihood.item())
| 44.830275
| 153
| 0.523688
| 1,425
| 9,773
| 3.448421
| 0.06807
| 0.062271
| 0.070208
| 0.079772
| 0.898657
| 0.883801
| 0.883394
| 0.883394
| 0.87851
| 0.87851
| 0
| 0.092784
| 0.325079
| 9,773
| 218
| 154
| 44.830275
| 0.652213
| 0.013097
| 0
| 0.790055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022099
| 1
| 0.038674
| false
| 0
| 0.033149
| 0
| 0.088398
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a69a6345ae4342baf11ffaf189972d9fbf73972
| 26,294
|
py
|
Python
|
tests/test_webscraper.py
|
HobnobMancer/cazy_webscraper
|
3f74492f46db2093f7e6cd91fffcb8347694e54e
|
[
"MIT"
] | 3
|
2020-10-22T08:31:29.000Z
|
2021-05-19T13:13:12.000Z
|
tests/test_webscraper.py
|
HobnobMancer/cazy_webscraper
|
3f74492f46db2093f7e6cd91fffcb8347694e54e
|
[
"MIT"
] | 62
|
2020-11-30T11:29:20.000Z
|
2022-03-28T13:50:30.000Z
|
tests/test_webscraper.py
|
HobnobMancer/cazy_webscraper
|
3f74492f46db2093f7e6cd91fffcb8347694e54e
|
[
"MIT"
] | 1
|
2021-03-10T16:30:11.000Z
|
2021-03-10T16:30:11.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) University of St Andrews 2020-2021
# (c) University of Strathclyde 2020-2021
# (c) James Hutton Institute 2020-2021
#
# Author:
# Emma E. M. Hobbs
#
# Contact
# eemh1@st-andrews.ac.uk
#
# Emma E. M. Hobbs,
# Biomolecular Sciences Building,
# University of St Andrews,
# North Haugh Campus,
# St Andrews,
# KY16 9ST
# Scotland,
# UK
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Tests the script cazy_webscraper.py which coordinates the scraping of CAZy.
These test are intened to be run from the root of the repository using:
pytest -v
"""
import os
import pytest
import sys
from argparse import Namespace, ArgumentParser
from scraper import cazy_webscraper, crawler, sql, utilities
from scraper.crawler.cazy_html_pages import get_cazy_pages, parse_local_pages
from scraper.crawler.parse_cazy_families import scrape_all, scrape_by_kingdom
from scraper.utilities import file_io, parse_configuration, parsers
@pytest.fixture
def taxonomic_filter_dict():
    """Empty taxonomy-filter dict, as parse_configuration returns when no tax filters are given."""
    return {key: [] for key in ("genera", "species", "strains")}
@pytest.fixture
def input_dir(test_input_dir):
    """Path to the directory holding the webscraper test inputs."""
    return test_input_dir / "test_inputs_webscraper"
@pytest.fixture
def output_dir(test_dir):
    """Path to the directory into which test outputs are written."""
    return test_dir / "test_outputs"
@pytest.fixture
def db_path():
    """Path (str) to the pre-built unit-test database file."""
    return "tests/test_inputs/unit_test_database/unit_test_2021-04-27--11-54-58.db"
@pytest.fixture
def logs_dir(output_dir):
    """Path to the directory where webscraper tests write their log files."""
    return output_dir / "test_webscraper" / "test_logs"
@pytest.fixture
def args_get_cazy_data(logs_dir):
    """Parsed-CLI-style args for get_cazy_data(), logging to logs_dir."""
    namespace = Namespace(
        subfamilies=True,
        retries=2,
        timeout=5,
        output=logs_dir,
    )
    return {"args": namespace}
@pytest.fixture
def args_get_cazy_data_stdout(logs_dir):
    """Parsed-CLI-style args for get_cazy_data(), writing output to stdout."""
    namespace = Namespace(
        subfamilies=True,
        retries=2,
        timeout=5,
        output=sys.stdout,
    )
    return {"args": namespace}
@pytest.fixture
def config_dict():
    """Scrape configuration: only GH3 from Glycoside Hydrolases, no PL restriction."""
    return {
        "Glycoside Hydrolases (GHs)": ["GH3"],
        "Polysaccharide Lyases (PLs)": None,
    }
# test main()
def test_main_get_pages(output_dir, cazy_dictionary, taxonomic_filter_dict, monkeypatch):
    """Test function main() when retrieval of CAZy HTML pages is enabled.

    Argv is None, logger is None, args.output is not sys.stdout, args.subfamilies is True.
    """
    def mock_building_parser(*args, **kwargs):
        # Bare parser; main() only needs an object exposing parse_args().
        parser_args = ArgumentParser(
            prog="cazy_webscraper.py",
            usage=None,
            description="Scrape the CAZy database",
            conflict_handler="error",
            add_help=True,
        )
        return parser_args

    def mock_parser(*args, **kwargs):
        # Stand-in for parsed CLI args; get_pages=True drives main() down the
        # "retrieve CAZy HTML pages" branch.
        parser = Namespace(
            config=None,
            classes=None,
            database="fake_database_path",
            ec=None,
            force=False,
            families=None,
            genera=None,
            get_pages=True,
            kingdoms=None,
            log=None,
            nodelete=False,
            output=output_dir,
            retries=10,
            scrape_files=None,
            subfamilies=True,
            species=None,
            strains=None,
            streamline=None,
            timeout=45,
            verbose=False,
        )
        return parser

    def mock_config_logger(*args, **kwargs):
        return

    def mock_making_output_dir(*args, **kwargs):
        return

    def mock_retrieving_configuration(*args, **kwargs):
        # excluded_classes, config_dict, cazy_dict, taxonomy_filter, kingdoms, ec_filter
        return None, None, cazy_dictionary, taxonomic_filter_dict, [], []

    def mock_filter_set(*args, **kwargs):
        return set()

    def mock_get_pages(*args, **kwargs):
        return

    # Replace all I/O-touching collaborators so main() runs in isolation.
    monkeypatch.setattr(parsers, "build_parser", mock_building_parser)
    monkeypatch.setattr(ArgumentParser, "parse_args", mock_parser)
    monkeypatch.setattr(utilities, "config_logger", mock_config_logger)
    monkeypatch.setattr(file_io, "make_output_directory", mock_making_output_dir)
    monkeypatch.setattr(parse_configuration, "parse_configuration", mock_retrieving_configuration)
    monkeypatch.setattr(cazy_webscraper, "get_filter_set", mock_filter_set)
    monkeypatch.setattr(get_cazy_pages, "get_cazy_pages", mock_get_pages)

    cazy_webscraper.main()
def test_main_invalid_db_path(output_dir, cazy_dictionary, taxonomic_filter_dict, monkeypatch):
    """Test function main() when an invalid db path is given.

    Argv is None, logger is None, args.output is not sys.stdout, args.subfamilies is True.
    """
    def mock_building_parser(*args, **kwargs):
        # Bare parser; main() only needs an object exposing parse_args().
        parser_args = ArgumentParser(
            prog="cazy_webscraper.py",
            usage=None,
            description="Scrape the CAZy database",
            conflict_handler="error",
            add_help=True,
        )
        return parser_args

    def mock_parser(*args, **kwargs):
        # database points at a path that does not exist, so main() should
        # terminate via sys.exit (asserted below).
        parser = Namespace(
            config=None,
            classes=None,
            database="fake_database_path",
            ec=None,
            force=False,
            families=None,
            genera=None,
            get_pages=False,
            kingdoms=None,
            log=None,
            nodelete=False,
            output=output_dir,
            retries=10,
            scrape_files=None,
            subfamilies=True,
            species=None,
            strains=None,
            streamline=None,
            timeout=45,
            verbose=False,
        )
        return parser

    def mock_config_logger(*args, **kwargs):
        return

    def mock_making_output_dir(*args, **kwargs):
        return

    def mock_retrieving_configuration(*args, **kwargs):
        # excluded_classes, config_dict, cazy_dict, taxonomy_filter, kingdoms, ec_filter
        return None, None, cazy_dictionary, taxonomic_filter_dict, [], []

    def mock_filter_set(*args, **kwargs):
        return set()

    monkeypatch.setattr(parsers, "build_parser", mock_building_parser)
    monkeypatch.setattr(ArgumentParser, "parse_args", mock_parser)
    monkeypatch.setattr(utilities, "config_logger", mock_config_logger)
    monkeypatch.setattr(file_io, "make_output_directory", mock_making_output_dir)
    monkeypatch.setattr(parse_configuration, "parse_configuration", mock_retrieving_configuration)
    monkeypatch.setattr(cazy_webscraper, "get_filter_set", mock_filter_set)

    with pytest.raises(SystemExit) as pytest_wrapped_e:
        cazy_webscraper.main()
    assert pytest_wrapped_e.type == SystemExit
def test_main_db_raises_error(output_dir, cazy_dictionary, taxonomic_filter_dict, monkeypatch):
    """Test function main() when an error is raised when building the local db."""
    def mock_building_parser(*args, **kwargs):
        # Bare parser; main() only needs an object exposing parse_args().
        parser_args = ArgumentParser(
            prog="cazy_webscraper.py",
            usage=None,
            description="Scrape the CAZy database",
            conflict_handler="error",
            add_help=True,
        )
        return parser_args

    def mock_parser(*args, **kwargs):
        # database=None forces main() to build a new local db, which the
        # mocked build_db below makes fail.
        parser = Namespace(
            config=None,
            classes=None,
            database=None,
            ec=None,
            force=False,
            families=None,
            genera=None,
            get_pages=False,
            kingdoms=None,
            log=None,
            nodelete=False,
            output=output_dir,
            retries=10,
            scrape_files=None,
            subfamilies=True,
            species=None,
            strains=None,
            streamline=None,
            timeout=45,
            verbose=False,
        )
        return parser

    def mock_config_logger(*args, **kwargs):
        return

    def mock_making_output_dir(*args, **kwargs):
        return

    def mock_retrieving_configuration(*args, **kwargs):
        # excluded_classes, config_dict, cazy_dict, taxonomy_filter, kingdoms, ec_filter
        return None, None, cazy_dictionary, taxonomic_filter_dict, [], []

    def mock_filter_set(*args, **kwargs):
        return set()

    def mock_db_build(*args, **kwargs):
        # Simulate a failure while building the local database.
        raise TypeError

    monkeypatch.setattr(parsers, "build_parser", mock_building_parser)
    monkeypatch.setattr(ArgumentParser, "parse_args", mock_parser)
    monkeypatch.setattr(utilities, "config_logger", mock_config_logger)
    monkeypatch.setattr(file_io, "make_output_directory", mock_making_output_dir)
    monkeypatch.setattr(parse_configuration, "parse_configuration", mock_retrieving_configuration)
    monkeypatch.setattr(cazy_webscraper, "get_filter_set", mock_filter_set)
    monkeypatch.setattr(sql.sql_orm, "build_db", mock_db_build)

    # The db-build failure should terminate main() via sys.exit.
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        cazy_webscraper.main()
    assert pytest_wrapped_e.type == SystemExit
def test_main_existing_db_scrape_local_pages(
    output_dir,
    null_logger,
    cazy_dictionary,
    db_path,
    input_dir,
    taxonomic_filter_dict,
    monkeypatch,
):
    """Test function main() when passed an existing database scraping data from local HTML pages.

    Argv is not None, logger is not None, args.output is output_dir, args.subfamilies is True,
    and valid db path is given by db_path.
    """
    def mock_building_parser(*args, **kwargs):
        # Bare parser; main() only needs an object exposing parse_args().
        parser_args = ArgumentParser(
            prog="cazy_webscraper.py",
            usage=None,
            description="Scrape the CAZy database",
            conflict_handler="error",
            add_help=True,
        )
        return parser_args

    def mock_parser(*args, **kwargs):
        # Existing db (db_path) + local HTML pages (scrape_files=input_dir)
        # selects the parse-local-pages branch of main().
        parser = Namespace(
            config=None,
            classes=None,
            database=db_path,
            ec=None,
            force=False,
            families=None,
            genera=None,
            get_pages=False,
            kingdoms=None,
            log=None,
            nodelete=False,
            output=output_dir,
            retries=10,
            scrape_files=input_dir,
            subfamilies=True,
            species=None,
            strains=None,
            streamline="streamline_args",
            timeout=45,
            verbose=False,
        )
        return parser

    def mock_config_logger(*args, **kwargs):
        return

    def mock_making_output_dir(*args, **kwargs):
        return

    def mock_retrieving_configuration(*args, **kwargs):
        # excluded_classes, config_dict, cazy_dict, taxonomy_filter, kingdoms, ec_filter
        # Fix: taxonomic_filter_dict is now injected via the fixture parameter
        # above; previously it was absent from the test signature, so calling
        # this mock from main() raised NameError.
        return None, None, cazy_dictionary, taxonomic_filter_dict, [], []

    def mock_filter_set(*args, **kwargs):
        return set()

    def mock_none(*args, **kwargs):
        return

    monkeypatch.setattr(parsers, "build_parser", mock_building_parser)
    monkeypatch.setattr(ArgumentParser, "parse_args", mock_parser)
    monkeypatch.setattr(utilities, "config_logger", mock_config_logger)
    monkeypatch.setattr(file_io, "make_output_directory", mock_making_output_dir)
    monkeypatch.setattr(parse_configuration, "parse_configuration", mock_retrieving_configuration)
    monkeypatch.setattr(cazy_webscraper, "get_filter_set", mock_filter_set)
    monkeypatch.setattr(parse_configuration, "create_streamline_scraping_warning", mock_none)
    monkeypatch.setattr(sql.sql_interface, "log_scrape_in_db", mock_none)
    monkeypatch.setattr(parse_local_pages, "parse_local_pages", mock_none)

    cazy_webscraper.main(["argv"])
def test_main_existing_db_scrape_direct(
    output_dir,
    null_logger,
    cazy_dictionary,
    db_path,
    input_dir,
    taxonomic_filter_dict,
    monkeypatch,
):
    """Test function main() when passed an existing database scraping CAZy directly.

    Argv is not None, logger is not None, args.output is output_dir, args.subfamilies is True,
    and valid db path is given by db_path.
    """
    def mock_building_parser(*args, **kwargs):
        # Bare parser; main() only needs an object exposing parse_args().
        parser_args = ArgumentParser(
            prog="cazy_webscraper.py",
            usage=None,
            description="Scrape the CAZy database",
            conflict_handler="error",
            add_help=True,
        )
        return parser_args

    def mock_parser(*args, **kwargs):
        # Existing db (db_path) with scrape_files=None selects the
        # scrape-CAZy-directly branch of main().
        parser = Namespace(
            config=None,
            classes=None,
            database=db_path,
            ec=None,
            force=False,
            families=None,
            genera=None,
            get_pages=False,
            kingdoms=None,
            log=None,
            nodelete=False,
            output=output_dir,
            retries=10,
            scrape_files=None,
            subfamilies=True,
            species=None,
            strains=None,
            streamline="streamline_args",
            timeout=45,
            verbose=False,
        )
        return parser

    def mock_config_logger(*args, **kwargs):
        return

    def mock_making_output_dir(*args, **kwargs):
        return

    def mock_retrieving_configuration(*args, **kwargs):
        # excluded_classes, config_dict, cazy_dict, taxonomy_filter, kingdoms, ec_filter
        # Fix: taxonomic_filter_dict is now injected via the fixture parameter
        # above; previously it was absent from the test signature, so calling
        # this mock from main() raised NameError.
        return None, None, cazy_dictionary, taxonomic_filter_dict, [], []

    def mock_filter_set(*args, **kwargs):
        return set()

    def mock_none(*args, **kwargs):
        return

    monkeypatch.setattr(parsers, "build_parser", mock_building_parser)
    monkeypatch.setattr(ArgumentParser, "parse_args", mock_parser)
    monkeypatch.setattr(utilities, "config_logger", mock_config_logger)
    monkeypatch.setattr(file_io, "make_output_directory", mock_making_output_dir)
    monkeypatch.setattr(parse_configuration, "parse_configuration", mock_retrieving_configuration)
    monkeypatch.setattr(cazy_webscraper, "get_filter_set", mock_filter_set)
    monkeypatch.setattr(parse_configuration, "create_streamline_scraping_warning", mock_none)
    monkeypatch.setattr(sql.sql_interface, "log_scrape_in_db", mock_none)
    monkeypatch.setattr(cazy_webscraper, "get_cazy_data", mock_none)

    cazy_webscraper.main(["argv"])
def test_main_new_database(output_dir, cazy_dictionary, taxonomic_filter_dict, monkeypatch):
    """Test main() when a new database_file is created"""
    def mock_building_parser(*args, **kwargs):
        # Bare parser; main() only needs an object exposing parse_args().
        parser_args = ArgumentParser(
            prog="cazy_webscraper.py",
            usage=None,
            description="Scrape the CAZy database",
            conflict_handler="error",
            add_help=True,
        )
        return parser_args

    # Real directory for the new db file (make_output_directory is NOT mocked
    # here, so main() actually writes into this path).
    output_path = output_dir / "test_webscraper" / "temp_dir_for_db"
    os.makedirs(output_path, exist_ok=True)

    def mock_parser(*args, **kwargs):
        # database=None + force=True drives main() to build a fresh local db.
        parser = Namespace(
            config=None,
            classes=None,
            database=None,
            ec=None,
            force=True,
            families=None,
            genera=None,
            get_pages=False,
            kingdoms=None,
            log=None,
            nodelete=False,
            output=output_path,
            retries=10,
            scrape_files=None,
            subfamilies=True,
            species=None,
            strains=None,
            streamline="streamline_args",
            timeout=45,
            verbose=False,
        )
        return parser

    def mock_config_logger(*args, **kwargs):
        return

    def mock_retrieving_configuration(*args, **kwargs):
        # excluded_classes, config_dict, cazy_dict, taxonomy_filter, kingdoms, ec_filter
        # Fix: taxonomic_filter_dict is now injected via the fixture parameter
        # above; previously it was absent from the test signature, so calling
        # this mock from main() raised NameError.
        return None, None, cazy_dictionary, taxonomic_filter_dict, [], []

    def mock_filter_set(*args, **kwargs):
        return set()

    def mock_none(*args, **kwargs):
        return

    monkeypatch.setattr(parsers, "build_parser", mock_building_parser)
    monkeypatch.setattr(ArgumentParser, "parse_args", mock_parser)
    monkeypatch.setattr(utilities, "config_logger", mock_config_logger)
    monkeypatch.setattr(parse_configuration, "parse_configuration", mock_retrieving_configuration)
    monkeypatch.setattr(cazy_webscraper, "get_filter_set", mock_filter_set)
    monkeypatch.setattr(parse_configuration, "create_streamline_scraping_warning", mock_none)
    monkeypatch.setattr(sql.sql_interface, "log_scrape_in_db", mock_none)
    monkeypatch.setattr(cazy_webscraper, "get_cazy_data", mock_none)

    cazy_webscraper.main(["argv"])

    # delete newly build db
    file_io.make_output_directory(output_dir, True, False)
# test get_filter_set
def test_get_filter_set():
    """Test get_filter_set with genus filters present."""
    filters = dict(genera=["Aspergillus", "Trichoderma"], strains=[])
    cazy_webscraper.get_filter_set(filters)
def test_get_filter_set_none():
    """Test get_filter_set when no filters are provided."""
    result = cazy_webscraper.get_filter_set({})
    assert result is None
# test get_cazy_data()
def test_get_cazy_data_no_fam_urls(
    cazy_home_url,
    cazy_dictionary,
    config_dict,
    time_stamp,
    args_get_cazy_data,
    logs_dir,
    monkeypatch,
    null_logger
):
    """Test get_cazy_data() when no family URLS are retrieved."""
    os.makedirs(logs_dir, exist_ok=True)

    def mock_get_classes(*args, **kwargs):
        # Single CAZy class for the crawler to process.
        class1 = crawler.CazyClass("test_class", "test_class_url.html", 0)
        return [class1]

    def mock_get_families(*args, **kwargs):
        # No family URLs (None), an error message, and malformed URLs.
        return None, "test error message", ["test_url1", "test_url2"]

    def mock_logger(*args, **kwargs):
        return null_logger

    monkeypatch.setattr(crawler, "get_cazy_classes", mock_get_classes)
    monkeypatch.setattr(crawler, "get_cazy_family_urls", mock_get_families)
    monkeypatch.setattr(utilities, "build_logger", mock_logger)

    cazy_webscraper.get_cazy_data(
        cazy_home=cazy_home_url,
        excluded_classes=None,
        config_dict=config_dict,
        cazy_dict=cazy_dictionary,
        taxonomy_filters=set(),
        kingdoms="all",
        ec_filters=[],
        time_stamp="timestamp",
        session="session_representative",
        args=args_get_cazy_data["args"],
    )
    # Remove the log directory created above.
    file_io.make_output_directory(logs_dir, True, False)
def test_get_cazy_data_no_all(
    time_stamp,
    cazy_home_url,
    cazy_dictionary,
    args_get_cazy_data,
    logs_dir,
    monkeypatch,
):
    """Test get_cazy_data() when no kingdoms are specified and config_dict is None."""
    # prepare dir for log files
    os.makedirs(logs_dir, exist_ok=True)

    fam1 = crawler.Family("test_fam", "test_class", "test_url")

    def mock_get_classes(*args, **kwargs):
        class1 = crawler.CazyClass(
            name="test_class",
            url="test_class_url.html",
            tries=0,
        )
        return [class1]

    def mock_get_fam_urls(*args, **kwargs):
        # One family, an error message, and malformed URL fragments.
        return [fam1], "error message", ["in", "cor", "rect", "urls"]

    def mock_parse_family(*args, **kwargs):
        # family, retry flag, failed scrapes, failed SQL inserts, format errors, session
        return fam1, True, ["fail1", "fail2"], ["sqlFail1", "sqlFail2"], ["format error"], "session"

    monkeypatch.setattr(crawler, "get_cazy_classes", mock_get_classes)
    monkeypatch.setattr(crawler, "get_cazy_family_urls", mock_get_fam_urls)
    monkeypatch.setattr(scrape_all, "parse_family_via_all_pages", mock_parse_family)

    cazy_webscraper.get_cazy_data(
        cazy_home=cazy_home_url,
        excluded_classes=None,
        config_dict=None,
        cazy_dict=None,
        taxonomy_filters=set(),
        kingdoms="all",
        ec_filters=[],
        time_stamp="timestamp",
        session="session_representative",
        args=args_get_cazy_data["args"],
    )
    # Remove the log directory created above.
    file_io.make_output_directory(logs_dir, True, False)
def test_get_cazy_data_no_config_dict_kingdom(
    time_stamp,
    cazy_home_url,
    cazy_dictionary,
    args_get_cazy_data,
    logs_dir,
    monkeypatch,
):
    """Test get_cazy_data() when kingdoms are specified and config_dict is None."""
    # prepare dir for log files
    os.makedirs(logs_dir, exist_ok=True)

    fam1 = crawler.Family("test_fam", "test_class", "test_url")

    def mock_get_classes(*args, **kwargs):
        # Class carries a previously failed family so the retry path runs.
        class1 = crawler.CazyClass(
            name="test_class",
            url="test_class_url.html",
            tries=0,
            failed_families={fam1: 0},
        )
        return [class1]

    def mock_parse_family(*args, **kwargs):
        # family, retry flag, failed scrapes, failed SQL inserts, format errors, session
        return fam1, True, ["fail1", "fail2"], ["sqlFail1", "sqlFail2"], ["format error"], "session"

    monkeypatch.setattr(crawler, "get_cazy_classes", mock_get_classes)
    monkeypatch.setattr(scrape_by_kingdom, "parse_family_by_kingdom", mock_parse_family)

    cazy_webscraper.get_cazy_data(
        cazy_home=cazy_home_url,
        excluded_classes=None,
        config_dict=None,
        cazy_dict=None,
        taxonomy_filters=set(),
        kingdoms=["Bacteria", "Viruses"],
        ec_filters=[],
        time_stamp="timestamp",
        session="session_representative",
        args=args_get_cazy_data["args"],
    )
    # Remove the log directory created above.
    file_io.make_output_directory(logs_dir, True, False)
def test_get_cazy_data_config_data_all(
    time_stamp,
    cazy_home_url,
    cazy_dictionary,
    args_get_cazy_data,
    logs_dir,
    monkeypatch,
):
    """Test get_cazy_data() when no kingdoms are specified and configuration given."""
    # prepare dir for log files
    os.makedirs(logs_dir, exist_ok=True)

    # Subfamily of GH3, which is the only family the config below allows.
    fam1 = crawler.Family("GH3_1", "test_class", "test_url")
    config_dict = {"Glycoside Hydrolases": ["GH3"]}

    def mock_get_classes(*args, **kwargs):
        # Class carries a previously failed family so the retry path runs.
        class1 = crawler.CazyClass(
            name="Glycoside Hydrolases",
            url="test_class_url.html",
            tries=0,
            failed_families={fam1: 0},
        )
        return [class1]

    def mock_parse_family(*args, **kwargs):
        # family, retry flag, failed scrapes, failed SQL inserts, format errors, session
        return fam1, True, ["fail1", "fail2"], ["sqlFail1", "sqlFail2"], ["format error"], "session"

    monkeypatch.setattr(crawler, "get_cazy_classes", mock_get_classes)
    monkeypatch.setattr(scrape_all, "parse_family_via_all_pages", mock_parse_family)

    cazy_webscraper.get_cazy_data(
        cazy_home=cazy_home_url,
        excluded_classes=None,
        config_dict=config_dict,
        cazy_dict=cazy_dictionary,
        taxonomy_filters=set(),
        kingdoms="all",
        ec_filters=[],
        time_stamp="timestamp",
        session="session_representative",
        args=args_get_cazy_data["args"],
    )
    # Remove the log directory created above.
    file_io.make_output_directory(logs_dir, True, False)
def test_get_cazy_data_config_data_kingdom(
    time_stamp,
    cazy_home_url,
    cazy_dictionary,
    args_get_cazy_data,
    logs_dir,
    monkeypatch,
):
    """Test get_cazy_data() when kingdoms are specified and configuration given."""
    # prepare dir for log files
    os.makedirs(logs_dir, exist_ok=True)

    fam1 = crawler.Family("GH1", "test_class", "test_url")
    config_dict = {"Glycoside Hydrolases": ["GH1"]}

    def mock_get_classes(*args, **kwargs):
        # Class carries a previously failed family so the retry path runs.
        class1 = crawler.CazyClass(
            name="Glycoside Hydrolases",
            url="test_class_url.html",
            tries=0,
            failed_families={fam1: 0},
        )
        return [class1]

    def mock_parse_family(*args, **kwargs):
        # family, retry flag, failed scrapes, failed SQL inserts, format errors, session
        return fam1, True, ["fail1", "fail2"], ["sqlFail1", "sqlFail2"], ["format error"], {}

    monkeypatch.setattr(crawler, "get_cazy_classes", mock_get_classes)
    monkeypatch.setattr(scrape_by_kingdom, "parse_family_by_kingdom", mock_parse_family)

    cazy_webscraper.get_cazy_data(
        cazy_home=cazy_home_url,
        excluded_classes=None,
        config_dict=config_dict,
        cazy_dict=cazy_dictionary,
        taxonomy_filters=set(),
        kingdoms=["Bacteria", "Viruses"],
        ec_filters=[],
        time_stamp="timestamp",
        session={},
        args=args_get_cazy_data["args"],
    )
    # Remove the log directory created above.
    file_io.make_output_directory(logs_dir, True, False)
def test_get_cazy_data_config_data_kingdom_stdout(
    time_stamp,
    cazy_home_url,
    cazy_dictionary,
    args_get_cazy_data_stdout,
    logs_dir,
    monkeypatch,
):
    """Test get_cazy_data() when kingdoms are specified and configuration given."""
    # prepare dir for log files
    os.makedirs(logs_dir, exist_ok=True)

    fam1 = crawler.Family("GH1", "test_class", "test_url")
    config_dict = {"Glycoside Hydrolases": ["GH1"]}

    def mock_get_classes(*args, **kwargs):
        # Class carries a previously failed family so the retry path runs.
        class1 = crawler.CazyClass(
            name="Glycoside Hydrolases",
            url="test_class_url.html",
            tries=0,
            failed_families={fam1: 0},
        )
        return [class1]

    def mock_parse_family(*args, **kwargs):
        # family, retry flag, failed scrapes, failed SQL inserts, format errors, session
        return fam1, True, ["fail1", "fail2"], ["sqlFail1", "sqlFail2"], ["format error"], {}

    monkeypatch.setattr(crawler, "get_cazy_classes", mock_get_classes)
    monkeypatch.setattr(scrape_by_kingdom, "parse_family_by_kingdom", mock_parse_family)

    # args_get_cazy_data_stdout directs output to sys.stdout instead of a dir.
    cazy_webscraper.get_cazy_data(
        cazy_home=cazy_home_url,
        excluded_classes=None,
        config_dict=config_dict,
        cazy_dict=cazy_dictionary,
        taxonomy_filters=set(),
        kingdoms=["Bacteria", "Viruses"],
        ec_filters=[],
        time_stamp="timestamp",
        session={},
        args=args_get_cazy_data_stdout["args"],
    )
    # Remove the log directory created above.
    file_io.make_output_directory(logs_dir, True, False)
| 31.265161
| 100
| 0.657945
| 3,094
| 26,294
| 5.296703
| 0.104396
| 0.065902
| 0.023493
| 0.012814
| 0.813949
| 0.797779
| 0.790578
| 0.777459
| 0.774713
| 0.76855
| 0
| 0.006991
| 0.243782
| 26,294
| 840
| 101
| 31.302381
| 0.81719
| 0.138435
| 0
| 0.82459
| 0
| 0
| 0.112664
| 0.022604
| 0
| 0
| 0
| 0
| 0.004918
| 1
| 0.12459
| false
| 0
| 0.013115
| 0.057377
| 0.237705
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a707dceb0fa3cf47303e31b81af1d4308483daa
| 3,726
|
py
|
Python
|
tests/unit_tests/test_UpdatedImageProcessing_target_analyzer.py
|
liyu711/SUAS
|
2f6592fc2ab316475eeabe2f4828e5ba5c1a4b0b
|
[
"MIT"
] | null | null | null |
tests/unit_tests/test_UpdatedImageProcessing_target_analyzer.py
|
liyu711/SUAS
|
2f6592fc2ab316475eeabe2f4828e5ba5c1a4b0b
|
[
"MIT"
] | null | null | null |
tests/unit_tests/test_UpdatedImageProcessing_target_analyzer.py
|
liyu711/SUAS
|
2f6592fc2ab316475eeabe2f4828e5ba5c1a4b0b
|
[
"MIT"
] | null | null | null |
import unittest
from PIL import Image
from UpdatedImageProcessing.TargetDetection import *
class TargetAnalyzerTestCase(unittest.TestCase):
    """Unit tests for TargetAnalyzer's color-averaging helpers."""

    def setUp(self):
        # Shared test image containing the four colored targets checked below.
        self.test_image1 = Image.open("tests/images/image1_test_image_bounder.png")

    def _assert_rgb_close(self, actual, expected):
        """Assert each RGB channel of *actual* is within 5 of *expected*."""
        for channel_value, expected_value in zip(actual, expected):
            self.assertTrue(abs(channel_value - expected_value) < 5)

    def test_find_target_average_color(self):
        crops = ((140, 100, 40, 40), (460, 100, 40, 40),
                 (300, 340, 40, 40), (460, 220, 40, 40))
        expected_colors = ((255, 255, 255), (255, 0, 0), (0, 102, 153), (102, 0, 153))
        for crop, expected in zip(crops, expected_colors):
            actual = TargetAnalyzer.find_target_average_color(self.test_image1, crop)
            self._assert_rgb_close(actual, expected)

    def test_find_surrounding_average_color(self):
        crops = ((140, 100, 40, 40), (460, 100, 40, 40),
                 (300, 340, 40, 40), (460, 220, 40, 40))
        expected_colors = ((255, 255, 255), (255, 0, 0), (0, 95, 159), (95, 0, 159))
        for crop, expected in zip(crops, expected_colors):
            actual = TargetAnalyzer.find_surrounding_average_color(self.test_image1, crop)
            self._assert_rgb_close(actual, expected)

    def test_find_rim_average_color(self):
        # The image rim is mid-gray.
        actual = TargetAnalyzer.find_rim_average_color(self.test_image1)
        self._assert_rgb_close(actual, (127, 127, 127))

    def test_find_average_corner_color(self):
        # The image corners are mid-gray.
        actual = TargetAnalyzer.find_average_corner_color(self.test_image1)
        self._assert_rgb_close(actual, (127, 127, 127))
| 55.61194
| 125
| 0.72088
| 509
| 3,726
| 5.001965
| 0.100196
| 0.164965
| 0.200314
| 0.247447
| 0.839356
| 0.77337
| 0.749018
| 0.733307
| 0.55381
| 0.139042
| 0
| 0.08192
| 0.161299
| 3,726
| 66
| 126
| 56.454545
| 0.7328
| 0
| 0
| 0
| 0
| 0
| 0.011272
| 0.011272
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.1
| false
| 0
| 0.06
| 0
| 0.18
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aa91569b43ce7f7b487cd1b09e35a7eba6b5c46
| 177
|
py
|
Python
|
cassie/__init__.py
|
RohanPankaj/apex
|
74e96386bf9446d1179106d6d65ea0368c1b5b27
|
[
"MIT"
] | null | null | null |
cassie/__init__.py
|
RohanPankaj/apex
|
74e96386bf9446d1179106d6d65ea0368c1b5b27
|
[
"MIT"
] | null | null | null |
cassie/__init__.py
|
RohanPankaj/apex
|
74e96386bf9446d1179106d6d65ea0368c1b5b27
|
[
"MIT"
] | null | null | null |
from .cassie_env import CassieEnv
from .taskspace_env import CassieTSEnv
from .ik_env import CassieIKEnv
from .no_delta_env import CassieEnv_nodelta
from .cassiemujoco import *
| 29.5
| 43
| 0.853107
| 25
| 177
| 5.8
| 0.52
| 0.248276
| 0.248276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112994
| 177
| 6
| 44
| 29.5
| 0.923567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6ae20a706746dfd0b5112c3c04e143edfe9b0344
| 1,191
|
py
|
Python
|
Project Euler/#8.py
|
cRohda/Comp-Sci
|
07e36fdcca242f5aa4f3e11440e9c77616973031
|
[
"Apache-2.0"
] | null | null | null |
Project Euler/#8.py
|
cRohda/Comp-Sci
|
07e36fdcca242f5aa4f3e11440e9c77616973031
|
[
"Apache-2.0"
] | null | null | null |
Project Euler/#8.py
|
cRohda/Comp-Sci
|
07e36fdcca242f5aa4f3e11440e9c77616973031
|
[
"Apache-2.0"
] | null | null | null |
# Project Euler #8 working data: the 1000-digit number, kept as a string so
# individual digits can be sliced and compared directly.
thousand = (
    '731671765313306249192251196744265747423553491949349698352031277450632623957831801698480186947885184385861'
    '560789112949495459501737958331952853208805511125406987471585238630507156932909632952274430435576689664895'
    '044524452316173185640309871112172238311362229893423380308135336276614282806444486645238749303589072962904'
    '915604407723907138105158593079608667017242712188399879790879227492190169972088809377665727333001053367881'
    '220235421809751254540594752243525849077116705560136048395864467063244157221553975369781797784617406495514'
    '929086256932197846862248283972241375657056057490261407972968652414535100474821663704844031998900088952434'
    '506585412275886668811642717147992444292823086346567481391912316282458617866458359124566529476545682848912'
    '883142607690042242190226710556263211111093705442175069416589604080719840385096245544436298123098787992724'
    '428490918884580156166097919133875499200524063689912560717606058861164671094050775410022569831552000559357'
    '2972571636269561882670428252483600823257530420752963450'
)
# Break the digit string on zeros: any digit window containing a '0' has
# product zero, so only these zero-free segments matter for the problem.
a = thousand.split('0')
print(a)
| 91.615385
| 120
| 0.86398
| 17
| 1,191
| 60.529412
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.938144
| 0.104114
| 1,191
| 12
| 121
| 99.25
| 0.026242
| 0
| 0
| 0
| 0
| 0
| 0.84047
| 0.839631
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a7d7c4b28068c634b6d4d815a52928ff2251098
| 534,243
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs_/link_attributes/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs_/link_attributes/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs_/link_attributes/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# Compatibility shim: re-create a couple of Python 2 names under Python 3 so
# the auto-generated code below can reference them unconditionally.
if six.PY3:
    # Python 3 renamed the __builtin__ module to builtins; alias it back.
    import builtins as __builtin__

    # Python 3 unified int/long, so expose `long` as a module-level alias.
    long = int
elif six.PY2:
    import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/subTLVs/subTLVs/link-attributes/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters of IS Extended Reachability sub-TLV 19.
"""
__slots__ = ("_path_helper", "_extmethods", "__subtlv_type", "__local_protection")
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__subtlv_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
},
),
is_leaf=True,
yang_name="subtlv-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
self.__local_protection = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL_PROTECTION": {}, "LINK_EXCLUDED": {}},
)
),
is_leaf=False,
yang_name="local-protection",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-isis-neighbor-attribute",
"neighbors",
"neighbor",
"subTLVs",
"subTLVs",
"link-attributes",
"state",
]
def _get_subtlv_type(self):
"""
Getter method for subtlv_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/subtlv_type (identityref)
YANG Description: The type of subTLV being described. The type of subTLV is
expressed as a canonical name.
"""
return self.__subtlv_type
def _set_subtlv_type(self, v, load=False):
"""
Setter method for subtlv_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/subtlv_type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_subtlv_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subtlv_type() directly.
YANG Description: The type of subTLV being described. The type of subTLV is
expressed as a canonical name.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
},
),
is_leaf=True,
yang_name="subtlv-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """subtlv_type must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ISIS_TLV22_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', 
'@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV23_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_MIN_MAX_LINK_DELAY': 
{'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV135_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV141_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_ADMIN_GROUP': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV141_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV222_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV223_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'ISIS_TLV235_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV236_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV237_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV242_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV242_SR_CAPABILITY': 
{'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV242_SR_CAPABILITY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV242_SR_ALGORITHM': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV242_SR_ALGORITHM': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}},), is_leaf=True, yang_name="subtlv-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
}
)
self.__subtlv_type = t
if hasattr(self, "_set"):
self._set()
def _unset_subtlv_type(self):
self.__subtlv_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
},
),
is_leaf=True,
yang_name="subtlv-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
    def _get_local_protection(self):
        """
        Getter method for local_protection, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/local_protection (enumeration)

        Returns the leaf-list wrapper object currently stored for this
        read-only (config: false) leaf; callers should treat it as state data.

        YANG Description: Link local-protection attributes.
        """
        # Name-mangled backing attribute set by __init__/_set_local_protection.
        return self.__local_protection
def _set_local_protection(self, v, load=False):
"""
Setter method for local_protection, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/local_protection (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_local_protection is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_local_protection() directly.
YANG Description: Link local-protection attributes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL_PROTECTION": {}, "LINK_EXCLUDED": {}},
)
),
is_leaf=False,
yang_name="local-protection",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """local_protection must be of a type compatible with enumeration""",
"defined-type": "openconfig-network-instance:enumeration",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'LOCAL_PROTECTION': {}, 'LINK_EXCLUDED': {}},)), is_leaf=False, yang_name="local-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='enumeration', is_config=False)""",
}
)
self.__local_protection = t
if hasattr(self, "_set"):
self._set()
def _unset_local_protection(self):
self.__local_protection = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL_PROTECTION": {}, "LINK_EXCLUDED": {}},
)
),
is_leaf=False,
yang_name="local-protection",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
subtlv_type = __builtin__.property(_get_subtlv_type)
local_protection = __builtin__.property(_get_local_protection)
_pyangbind_elements = OrderedDict(
[("subtlv_type", subtlv_type), ("local_protection", local_protection)]
)
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/subTLVs/subTLVs/link-attributes/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters of IS Extended Reachability sub-TLV 19.
"""
__slots__ = ("_path_helper", "_extmethods", "__subtlv_type", "__local_protection")
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__subtlv_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
},
),
is_leaf=True,
yang_name="subtlv-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
self.__local_protection = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL_PROTECTION": {}, "LINK_EXCLUDED": {}},
)
),
is_leaf=False,
yang_name="local-protection",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-isis-neighbor-attribute",
"neighbors",
"neighbor",
"subTLVs",
"subTLVs",
"link-attributes",
"state",
]
def _get_subtlv_type(self):
    """Getter method for subtlv_type (identityref).

    Mapped from the YANG variable
    /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/subtlv_type.

    YANG description: The type of subTLV being described. The type of subTLV
    is expressed as a canonical name.
    """
    # Private (name-mangled) attribute populated by the class constructor.
    return self.__subtlv_type
def _set_subtlv_type(self, v, load=False):
"""
Setter method for subtlv_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/subtlv_type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_subtlv_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subtlv_type() directly.
YANG Description: The type of subTLV being described. The type of subTLV is
expressed as a canonical name.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
},
),
is_leaf=True,
yang_name="subtlv-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """subtlv_type must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ISIS_TLV22_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', 
'@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV22_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV23_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_MIN_MAX_LINK_DELAY': 
{'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV23_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV135_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV135_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV141_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_ADMIN_GROUP': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'oc-isis-lsdb-types:TLV141_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV141_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV222_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV222_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV223_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_EXTENDED_ADMIN_GROUP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_ADJ_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_ADJ_LAN_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_LINK_LOSS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV223_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 
'ISIS_TLV235_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV235_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV236_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 
'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV236_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV237_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE': {'@module': 
'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_TAG': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_TAG64': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_PREFIX_SID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_PREFIX_FLAGS': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV237_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'ISIS_TLV242_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV242_SR_CAPABILITY': 
{'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV242_SR_CAPABILITY': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'TLV242_SR_ALGORITHM': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}, 'oc-isis-lsdb-types:TLV242_SR_ALGORITHM': {'@module': 'openconfig-isis-lsdb-types', '@namespace': 'http://openconfig.net/yang/isis-lsdb-types'}},), is_leaf=True, yang_name="subtlv-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
}
)
self.__subtlv_type = t
if hasattr(self, "_set"):
self._set()
def _unset_subtlv_type(self):
self.__subtlv_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV22_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV22_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV23_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV23_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV135_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV135_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV141_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV141_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV222_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV222_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV223_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV4_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_LINK_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MAX_RESERVABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNRESERVED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_INTERFACE_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_IPV6_NEIGHBOR_ADDRESS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_EXTENDED_ADMIN_GROUP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_TE_DEFAULT_METRIC": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_ATTRIBUTES": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_PROTECTION_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_BANDWIDTH_CONSTRAINTS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UNCONSTRAINED_LSP": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_ADJ_LAN_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_MIN_MAX_LINK_DELAY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_DELAY_VARIATION": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_LINK_LOSS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_RESIDUAL_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_AVAILABLE_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV223_UTILIZED_BANDWIDTH": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV235_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV235_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV236_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV236_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV237_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_TAG64": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_SID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_PREFIX_FLAGS": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV4_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV237_IPV6_ROUTER_ID": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:ISIS_TLV242_SUBTLVS_TYPE": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_CAPABILITY": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
"oc-isis-lsdb-types:TLV242_SR_ALGORITHM": {
"@module": "openconfig-isis-lsdb-types",
"@namespace": "http://openconfig.net/yang/isis-lsdb-types",
},
},
),
is_leaf=True,
yang_name="subtlv-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
def _get_local_protection(self):
    """
    Getter method for local_protection, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/local_protection (enumeration)
    YANG Description: Link local-protection attributes.
    """
    # Returns the pyangbind-managed wrapper object (a YANGDynClass leaf-list),
    # not a plain Python value; callers interact with it through the wrapper API.
    return self.__local_protection
def _set_local_protection(self, v, load=False):
    """
    Setter method for local_protection, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/link_attributes/state/local_protection (enumeration)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_local_protection is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_local_protection() directly.
    YANG Description: Link local-protection attributes.
    """
    # If the incoming value carries a pyangbind union-type coercion hook,
    # normalize it before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value in the generated YANGDynClass so that the YANG
        # enumeration restriction ({LOCAL_PROTECTION, LINK_EXCLUDED}) and
        # leaf-list semantics are enforced on assignment.
        t = YANGDynClass(
            v,
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"LOCAL_PROTECTION": {}, "LINK_EXCLUDED": {}},
                )
            ),
            is_leaf=False,
            yang_name="local-protection",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="enumeration",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected generated type;
        # the "generated-type" string must mirror the YANGDynClass call above.
        raise ValueError(
            {
                "error-string": """local_protection must be of a type compatible with enumeration""",
                "defined-type": "openconfig-network-instance:enumeration",
                "generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'LOCAL_PROTECTION': {}, 'LINK_EXCLUDED': {}},)), is_leaf=False, yang_name="local-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='enumeration', is_config=False)""",
            }
        )
    self.__local_protection = t
    # Propagate the change notification if the parent framework provides one.
    if hasattr(self, "_set"):
        self._set()
def _unset_local_protection(self):
    """Reset local_protection to an empty generated leaf-list (its default state)."""
    # Recreate the YANGDynClass wrapper with no value — identical construction
    # to the setter's, so the restriction and metadata stay consistent.
    self.__local_protection = YANGDynClass(
        base=TypedListType(
            allowed_type=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"LOCAL_PROTECTION": {}, "LINK_EXCLUDED": {}},
            )
        ),
        is_leaf=False,
        yang_name="local-protection",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="enumeration",
        is_config=False,
    )
# Expose the generated getters as read-only properties. No setter is bound
# because these leaves are config: false; backends populate them by calling
# the private _set_* methods directly.
subtlv_type = __builtin__.property(_get_subtlv_type)
local_protection = __builtin__.property(_get_local_protection)

# Ordered registry of this container's child elements, used by pyangbind
# for serialization and iteration.
_pyangbind_elements = OrderedDict(
    [("subtlv_type", subtlv_type), ("local_protection", local_protection)]
)
| 68.536626
| 42,480
| 0.488448
| 47,052
| 534,243
| 5.404914
| 0.005271
| 0.190003
| 0.308755
| 0.248639
| 0.998525
| 0.997672
| 0.997672
| 0.997672
| 0.997672
| 0.997672
| 0
| 0.020681
| 0.359385
| 534,243
| 7,794
| 42,481
| 68.54542
| 0.722391
| 0.009316
| 0
| 0.756384
| 0
| 0.000521
| 0.569109
| 0.266134
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002084
| false
| 0
| 0.001954
| 0
| 0.006905
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0ab81320973cde728c526aa8f4999b12f0b815dd
| 5,522
|
py
|
Python
|
search_policies/cnn/procedures/fairnas_procedure.py
|
kcyu2014/eval-nas
|
385376a3ef96336b54ee7e696af1d02b97aa5c32
|
[
"MIT"
] | 47
|
2019-11-28T07:08:08.000Z
|
2021-12-09T05:11:37.000Z
|
search_policies/cnn/procedures/fairnas_procedure.py
|
kcyu2014/eval-nas
|
385376a3ef96336b54ee7e696af1d02b97aa5c32
|
[
"MIT"
] | 4
|
2020-01-14T02:16:13.000Z
|
2021-06-16T13:14:52.000Z
|
search_policies/cnn/procedures/fairnas_procedure.py
|
kcyu2014/eval-nas
|
385376a3ef96336b54ee7e696af1d02b97aa5c32
|
[
"MIT"
] | 9
|
2019-12-02T09:12:05.000Z
|
2021-07-06T02:00:15.000Z
|
import IPython
from torch import nn as nn
import search_policies.cnn.utils
from search_policies.cnn.darts_policy import utils as darts_utils
from .train_search_procedure import _summarize_shared_train
def fairnas_train_model_v1(train_queue, valid_queue, model, criterion, optimizer, lr, args, architect,
                           topology_sampler=None,
                           op_sampler=None,
                           ):
    """FairNAS training epoch (v1): one sampled topology per batch.

    Per batch: optionally re-sample the supernet topology, optionally run an
    architecture-search step, then accumulate gradients over every op-sampled
    child model before a single optimizer update.

    :param train_queue: iterable of (input, target) training batches.
    :param valid_queue: iterable of validation batches (consumed by `architect`).
    :param model: supernet model being trained.
    :param criterion: loss function.
    :param optimizer: weight optimizer.
    :param lr: current learning rate, forwarded to the architect and the logger.
    :param args: namespace providing debug, grad_clip, report_freq, unrolled.
    :param architect: optional architecture optimizer; skipped when falsy.
    :param topology_sampler: optional callable(model, architect, args) -> model.
    :param op_sampler: callable(model, architect, args) -> iterable of child models.
    :return: (average top-1 accuracy, average loss) over the epoch.
    """
    loss_meter = search_policies.cnn.utils.AverageMeter()
    acc1_meter = search_policies.cnn.utils.AverageMeter()
    acc5_meter = search_policies.cnn.utils.AverageMeter()
    for step, (input, target) in enumerate(train_queue):
        model.train()
        if args.debug and step > 10:
            print("Break after 10 batch")
            break
        # Sample a topology for this batch before touching the weights.
        if topology_sampler:
            model = topology_sampler(model, architect, args)
        batch_size = input.size(0)
        input = input.cuda().requires_grad_()
        target = target.cuda()
        if architect:
            # Architecture step first, on a freshly drawn validation batch.
            input_search, target_search = next(iter(valid_queue))
            input_search = input_search.requires_grad_().cuda()
            target_search = target_search.cuda()
            architect.step(input, target, input_search, target_search, lr, optimizer, unrolled=args.unrolled)
        # Accumulate gradients over all op-sampled child models, then step once.
        optimizer.zero_grad()
        loss_sum, logits_sum, n_children = 0.0, 0.0, 0
        for child in op_sampler(model, architect, args):
            child_logits, _ = child(input)
            logits_sum += child_logits
            child_loss = criterion(child_logits, target)
            child_loss.backward()
            loss_sum += child_loss.item()
            n_children += 1
        nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
        optimizer.step()
        prec1, prec5 = search_policies.cnn.utils.accuracy(logits_sum, target, topk=(1, 5))
        loss_meter.update(loss_sum / n_children, batch_size)
        acc1_meter.update(prec1.item(), batch_size)
        acc5_meter.update(prec5.item(), batch_size)
        if step % args.report_freq == 0:
            _summarize_shared_train(step, loss_meter.avg, loss_meter.avg, acc1_meter.avg, acc5_meter.avg, lr)
    return acc1_meter.avg, loss_meter.avg
def fairnas_train_model_v2(train_queue, valid_queue, model, criterion, optimizer, lr, args, architect,
                           topology_sampler=None,
                           op_sampler=None,
                           ):
    """FairNAS training epoch (v2): topology AND operation fairness.

    Unlike v1, which samples a single topology per batch, v2 iterates over
    every topology yielded by ``topology_sampler`` and, for each one, performs
    the full v1 inner step (optional architect update, gradient accumulation
    over op-sampled children, one optimizer step).

    :param train_queue: iterable of (input, target) training batches.
    :param valid_queue: iterable of validation batches (consumed by `architect`).
    :param model: supernet model being trained.
    :param criterion: loss function.
    :param optimizer: weight optimizer.
    :param lr: current learning rate.
    :param args: namespace providing debug, grad_clip, report_freq, unrolled.
    :param architect: optional architecture optimizer; skipped when falsy.
    :param topology_sampler: required callable yielding topology models.
    :param op_sampler: callable(model, architect, args) -> iterable of child models.
    :return: (average top-1 accuracy, average loss) over the epoch.
    """
    assert topology_sampler is not None, 'must pass a topology sampler here.'
    avg_loss = search_policies.cnn.utils.AverageMeter()
    avg_top1 = search_policies.cnn.utils.AverageMeter()
    avg_top5 = search_policies.cnn.utils.AverageMeter()
    for step, (input, target) in enumerate(train_queue):
        model.train()
        if args.debug and step > 10:
            print("Break after 10 batch")
            break
        # Topology fairness: visit every sampled topology for this batch.
        for topo_model in topology_sampler(model, architect, args):
            n = input.size(0)
            input = input.cuda().requires_grad_()
            target = target.cuda()
            if architect:
                # Architecture update on a freshly drawn validation batch.
                input_search, target_search = next(iter(valid_queue))
                input_search = input_search.requires_grad_().cuda()
                target_search = target_search.cuda()
                architect.step(input, target, input_search, target_search, lr, optimizer, unrolled=args.unrolled)
            # Operation fairness: accumulate gradients over all children.
            optimizer.zero_grad()
            loss_sum, logits_sum, n_children = 0.0, 0.0, 0
            for child in op_sampler(topo_model, architect, args):
                child_logits, _ = child(input)
                logits_sum += child_logits
                child_loss = criterion(child_logits, target)
                child_loss.backward()
                loss_sum += child_loss.item()
                n_children += 1
            nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
            optimizer.step()
            prec1, prec5 = search_policies.cnn.utils.accuracy(logits_sum, target, topk=(1, 5))
            avg_loss.update(loss_sum / n_children, n)
            avg_top1.update(prec1.item(), n)
            avg_top5.update(prec5.item(), n)
            if step % args.report_freq == 0:
                _summarize_shared_train(step, avg_loss.avg, avg_loss.avg, avg_top1.avg, avg_top5.avg, lr)
    return avg_top1.avg, avg_loss.avg
| 36.091503
| 116
| 0.602137
| 639
| 5,522
| 5.018779
| 0.179969
| 0.043655
| 0.053009
| 0.06174
| 0.891799
| 0.891799
| 0.891799
| 0.891799
| 0.891799
| 0.891799
| 0
| 0.013581
| 0.306592
| 5,522
| 152
| 117
| 36.328947
| 0.823975
| 0.191416
| 0
| 0.857143
| 0
| 0
| 0.017078
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 1
| 0.021978
| false
| 0.010989
| 0.054945
| 0
| 0.098901
| 0.021978
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ac61a5ec92ae4768147f3d81f3c39803179e495
| 1,188
|
py
|
Python
|
obsolete/security.py
|
taylorc1009/Web-Tech-Practicals
|
64dc532e95985d7a15ad7504de52d59f559c84a0
|
[
"Apache-2.0"
] | null | null | null |
obsolete/security.py
|
taylorc1009/Web-Tech-Practicals
|
64dc532e95985d7a15ad7504de52d59f559c84a0
|
[
"Apache-2.0"
] | null | null | null |
obsolete/security.py
|
taylorc1009/Web-Tech-Practicals
|
64dc532e95985d7a15ad7504de52d59f559c84a0
|
[
"Apache-2.0"
] | null | null | null |
### Udemy Section 5
# def authenticate(username, password): # checks if the user entered the correct password for a username
# user = User.find_by_username(username)
# if user and safe_str_cmp(user.password, password): # 'safe_str_cmp' is used to compare different character encodings simply: comparing ASCII with Unicode will cause issues
# return user
# def identity(payload): # returns the user details, acquired by ID
# user_id = payload['identity']
# return User.find_by_id(user_id)
### Udemy Section 4
# users = [
# User(1, 'bob', 'asdf')
# ]
# username_mapping = {u.username: u for u in users}
# userid_mapping = {u.id: u for u in users}
# def authenticate(username, password): # checks if the user entered the correct password for a username
# user = username_mapping.get(username, None)
# if user and safe_str_cmp(user.password, password): # 'safe_str_cmp' is used to compare different character encodings simply: comparing ASCII with Unicode will cause issues
# return user
# def identity(payload): # returns the user details, acquired by ID
# user_id = payload['identity']
# return userid_mapping.get(user_id, None)
| 38.322581
| 177
| 0.71633
| 171
| 1,188
| 4.859649
| 0.315789
| 0.033694
| 0.048135
| 0.036101
| 0.779783
| 0.750903
| 0.750903
| 0.750903
| 0.750903
| 0.750903
| 0
| 0.003122
| 0.191077
| 1,188
| 30
| 178
| 39.6
| 0.861603
| 0.90404
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ade246a97b5fe2ac2f9d7f50332fb01cac20c97
| 826
|
py
|
Python
|
Lib/site-packages/tensorflow/_api/v1/keras/optimizers/__init__.py
|
amitdev81296/tensorflow
|
9869739cc142a996432bef4dc91b1f1b165bc27a
|
[
"bzip2-1.0.6"
] | 1
|
2020-07-06T14:18:59.000Z
|
2020-07-06T14:18:59.000Z
|
keras-ResNet50/tensorflow/_api/v1/keras/optimizers/__init__.py
|
wuh0007/severless_ML_live
|
088b78b06434583b7443ab877a6cdd80121bb8d1
|
[
"MIT"
] | 4
|
2020-09-26T00:55:50.000Z
|
2022-02-10T01:53:06.000Z
|
keras-ResNet50/tensorflow/_api/v1/keras/optimizers/__init__.py
|
wuh0007/severless_ML_live
|
088b78b06434583b7443ab877a6cdd80121bb8d1
|
[
"MIT"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Built-in optimizer classes."""

from __future__ import print_function

# Re-export the Keras optimizer classes and helpers under this API module.
from tensorflow.python.keras.optimizers import (
    SGD,
    Adadelta,
    Adagrad,
    Adam,
    Adamax,
    Nadam,
    Optimizer,
    RMSprop,
    deserialize,
    get,
    serialize,
)

# The __future__ helper is not part of the public API surface.
del print_function
| 37.545455
| 82
| 0.848668
| 109
| 826
| 6.357798
| 0.357798
| 0.277056
| 0.31746
| 0.396825
| 0.650794
| 0.650794
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087167
| 826
| 21
| 83
| 39.333333
| 0.919098
| 0.186441
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.923077
| 0
| 0.923077
| 0.153846
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7c5499ff3db5079285cd004f30cf69c6ea94e5f4
| 14,921
|
py
|
Python
|
src/hiddb/asynchronous.py
|
hiddb/hiddb-sdk-py
|
37e5a928d050f575d90e0e87b2cf1f5f84792cd4
|
[
"MIT"
] | null | null | null |
src/hiddb/asynchronous.py
|
hiddb/hiddb-sdk-py
|
37e5a928d050f575d90e0e87b2cf1f5f84792cd4
|
[
"MIT"
] | null | null | null |
src/hiddb/asynchronous.py
|
hiddb/hiddb-sdk-py
|
37e5a928d050f575d90e0e87b2cf1f5f84792cd4
|
[
"MIT"
] | null | null | null |
import jwt
import time
import asyncio
import aiohttp
async def set_timeout(seconds, callback, args=None):
    """Sleep for *seconds*, then await *callback*.

    :param seconds: delay before invoking the callback.
    :param callback: coroutine function to await after the delay.
    :param args: optional tuple of positional arguments; when falsy the
        callback is awaited with no arguments.
    """
    await asyncio.sleep(seconds)
    if args:
        await callback(*args)
    else:
        await callback()
# Toggle TLS for all API traffic; flips the scheme below.
secure = True
domain = 'hiddb.io'
protocol = 'https' if secure else 'http'
# Account-level API endpoint; per-database (data-plane) calls instead use
# the {database_id}.{domain} host built inside the client methods.
baseDbUrl = f'{protocol}://api.{domain}'
# Headers sent with every request that carries a JSON body.
postHeaders = { 'Content-Type' : 'application/json' }
class HIDDB(object):
    """Asynchronous client for the HIDDB HTTP API.

    Obtain a logged-in instance via the :meth:`create` factory coroutine.
    Account-level calls (databases, instances) target ``api.{domain}``;
    data-plane calls (collections, indices, documents) target
    ``{database_id}.{domain}``. All endpoint methods funnel through one
    request helper, :meth:`_request`, and raise ``Exception`` carrying the
    status code and response body on any unexpected status.
    """

    @classmethod
    async def create(cls, key, secret):
        """Create a client and perform the initial machine login.

        :param key: API access key.
        :param secret: API secret key.
        :return: a logged-in :class:`HIDDB` instance.
        """
        self = HIDDB()
        self.state = State(self, key, secret)
        await self._machine_login(key, secret)
        return self

    @staticmethod
    def _db_url(database_id: str) -> str:
        """Base URL for data-plane calls against one database."""
        return f"{protocol}://{database_id}.{domain}"

    async def _request(self, base_url, method, path, body=None, ok=(200,), auth=True, as_json=True):
        """Issue one HTTP request and decode the response.

        :param base_url: scheme+host to open the session against.
        :param method: lower-case HTTP verb name ('get', 'post', 'delete').
        :param path: request path relative to *base_url*.
        :param body: optional JSON payload; sent with the JSON content-type.
        :param ok: tuple of acceptable status codes.
        :param auth: attach the bearer access token when True.
        :param as_json: decode the response as JSON (True) or raw text (False).
        :raises Exception: when the response status is not in *ok*.
        """
        async with aiohttp.ClientSession(base_url) as session:
            req = getattr(session, method)
            if auth:
                session.headers.update({'Authorization': f'Bearer {self.state.access_token}'})
            kwargs = {'json': body, 'headers': postHeaders} if body is not None else {}
            async with req(path, **kwargs) as resp:
                if resp.status not in ok:
                    raise Exception(f"Status code {resp.status}: {await resp.text()}")
                return await resp.json() if as_json else await resp.text()

    async def _machine_login(self, key: str, secret: str):
        """Exchange the key pair for an access token and store it on state."""
        body = {
            "access_key": key,
            "secret_key": secret
        }
        # No auth header yet: this call is what obtains the token.
        result = await self._request(baseDbUrl, "post", "/machine/login", body=body, auth=False)
        self.state.access_token = result['access_token']
        return self.state.access_token

    async def create_database(self, name: str):
        """Create a database named *name* (API answers 200 or 202)."""
        return await self._request(baseDbUrl, "post", "/database",
                                   body={"database_name": name}, ok=(200, 202))

    async def list_databases(self):
        """List all databases in the account."""
        return await self._request(baseDbUrl, "get", "/database")

    async def get_database(self, id: str):
        """Fetch one database by id."""
        return await self._request(baseDbUrl, "get", f"/database/{id}")

    async def delete_database(self, id: str):
        """Delete a database (acknowledged with 202)."""
        return await self._request(baseDbUrl, "delete", f"/database/{id}", ok=(202,))

    async def create_instance(self, database_id: str, type: str, volume_size: str):
        """Provision an instance for *database_id* (acknowledged with 202)."""
        body = {
            "database_id": database_id,
            "type": type,
            "volume_size": volume_size,
        }
        return await self._request(baseDbUrl, "post", "/instance", body=body, ok=(202,))

    async def get_instances(self):
        """List all instances."""
        return await self._request(baseDbUrl, "get", "/instance")

    async def get_instance(self, id: str):
        """Fetch one instance by id."""
        return await self._request(baseDbUrl, "get", f"/instance/{id}")

    async def delete_instance(self, id: str):
        """Delete an instance (acknowledged with 202)."""
        return await self._request(baseDbUrl, "delete", f"/instance/{id}", ok=(202,))

    async def create_collection(self, database_id: str, collection_name: str):
        """Create a collection inside *database_id*."""
        return await self._request(self._db_url(database_id), "post", "/collection",
                                   body={"collection_name": collection_name})

    async def list_collections(self, database_id: str):
        """List the collections of *database_id*."""
        return await self._request(self._db_url(database_id), "get", "/collection")

    async def get_collection(self, database_id: str, collection_name: str):
        """Fetch one collection by name."""
        return await self._request(self._db_url(database_id), "get",
                                   f"/collection/{collection_name}")

    async def delete_collection(self, database_id: str, collection_name: str):
        """Delete one collection by name."""
        return await self._request(self._db_url(database_id), "delete",
                                   f"/collection/{collection_name}")

    async def create_index(self, database_id: str, collection_name: str, index_name: str, dimension: int):
        """Create a vector index on field *index_name* with the given dimension."""
        body = {
            "field_name": index_name,
            "dimension": dimension,
        }
        return await self._request(self._db_url(database_id), "post",
                                   f"/collection/{collection_name}/index", body=body)

    async def list_indices(self, database_id: str, collection_name: str):
        """List the indices of a collection."""
        return await self._request(self._db_url(database_id), "get",
                                   f"/collection/{collection_name}/index")

    async def get_index(self, database_id: str, collection_name: str, index_name: str):
        """Fetch one index by name."""
        return await self._request(self._db_url(database_id), "get",
                                   f"/collection/{collection_name}/index/{index_name}")

    async def delete_index(self, database_id: str, collection_name: str, index_name: str):
        """Delete one index by name."""
        return await self._request(self._db_url(database_id), "delete",
                                   f"/collection/{collection_name}/index/{index_name}")

    async def insert_document(self, database_id: str, collection_name: str, documents: dict):
        """Insert documents into a collection; returns the raw response text."""
        return await self._request(self._db_url(database_id), "post",
                                   f"/collection/{collection_name}/document",
                                   body={"documents": documents}, as_json=False)

    async def search_nearest_documents(self, database_id: str, collection_name: str, index_name: str, vectors=None, ids=None, max_neighbors=10):
        """Nearest-neighbour search, by raw vectors or by existing document ids.

        *ids* takes precedence when both are supplied (preserves the original
        behaviour, where the ids body overwrote the vectors body).

        :raises ValueError: when neither *vectors* nor *ids* is given
            (previously this surfaced as an obscure NameError on `body`).
        """
        if ids:
            body = {
                "ids": ids,
                "field_name": index_name,
                "max_neighbors": max_neighbors
            }
        elif vectors:
            body = {
                "vectors": vectors,
                "field_name": index_name,
                "max_neighbors": max_neighbors
            }
        else:
            raise ValueError("search_nearest_documents requires either 'vectors' or 'ids'")
        return await self._request(self._db_url(database_id), "post",
                                   f"/collection/{collection_name}/document/search", body=body)

    async def get_document(self, database_id: str, collection_name: str, document_id: str):
        """Fetch one document by id."""
        return await self._request(self._db_url(database_id), "get",
                                   f"/collection/{collection_name}/document/{document_id}")

    async def delete_document(self, database_id: str, collection_name: str, document_id: str):
        """Delete one document by id."""
        return await self._request(self._db_url(database_id), "delete",
                                   f"/collection/{collection_name}/document/{document_id}")
class State:
    """Credential and access-token bookkeeping for a HIDDB client.

    Assigning ``access_token`` decodes the JWT (without signature
    verification) and schedules an automatic machine re-login sixty
    seconds before the token expires.
    """

    def __init__(self, hiddb: HIDDB, key: str, secret: str):
        self.hiddb = hiddb
        self._key = key
        self._secret = secret
        self._access_token = None
        self._decoded = None
        self._refresh = None

    @property
    def access_token(self):
        """The current raw JWT string, or ``None`` before login."""
        return self._access_token

    @access_token.setter
    def access_token(self, access_token):
        if not access_token:
            # Clearing the token: nothing to decode or to schedule.
            self._access_token = access_token
            return
        # Inspect the token payload only; the server already vouched for it.
        self._decoded = jwt.decode(access_token, options={"verify_signature": False})
        self._access_token = access_token
        if self._refresh:
            self._refresh.cancel()
        if 'exp' in self._decoded:
            # Re-login one minute before expiry so the token never lapses.
            delay = self._decoded['exp'] - time.time() - 60
            self._refresh = asyncio.ensure_future(
                set_timeout(delay, self.hiddb._machine_login, (self._key, self._secret))
            )
| 38.755844
| 157
| 0.579452
| 1,683
| 14,921
| 5.040404
| 0.070113
| 0.05069
| 0.033714
| 0.051868
| 0.836614
| 0.81681
| 0.8029
| 0.8029
| 0.788047
| 0.788047
| 0
| 0.006727
| 0.302594
| 14,921
| 384
| 158
| 38.856771
| 0.808476
| 0
| 0
| 0.712418
| 0
| 0
| 0.212586
| 0.090879
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009804
| false
| 0
| 0.013072
| 0.003268
| 0.107843
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c55febba3db7a3883231541cea9936a8d990fbc
| 46
|
py
|
Python
|
src_python/ctf/__init__.py
|
LinjianMa/ctf
|
06a50b6ea4be2eeb7f3d6c43f05a0befae94f08e
|
[
"BSD-2-Clause"
] | null | null | null |
src_python/ctf/__init__.py
|
LinjianMa/ctf
|
06a50b6ea4be2eeb7f3d6c43f05a0befae94f08e
|
[
"BSD-2-Clause"
] | 1
|
2021-03-15T21:44:12.000Z
|
2021-03-15T21:44:12.000Z
|
src_python/ctf/__init__.py
|
LinjianMa/ctf
|
06a50b6ea4be2eeb7f3d6c43f05a0befae94f08e
|
[
"BSD-2-Clause"
] | null | null | null |
from ctf.core import *
from ctf import random
| 15.333333
| 22
| 0.782609
| 8
| 46
| 4.5
| 0.625
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7cec5890e122df5841cb807166f62da901ac2c49
| 113,009
|
py
|
Python
|
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from ... import models as _models
# Generic placeholder for an operation's deserialized return type.
T = TypeVar('T')
# Signature of the optional `cls` callback operations accept: it receives the
# pipeline response, the deserialized body, and the response headers dict.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PathOperations:
"""PathOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.storage.filedatalake.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
    # Store the collaborators supplied by the service client: the pipeline
    # client sends the requests, and the (de)serializers map between wire
    # payloads and the generated model classes.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
async def create(
    self,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    resource: Optional[Union[str, "_models.PathResourceType"]] = None,
    continuation: Optional[str] = None,
    mode: Optional[Union[str, "_models.PathRenameMode"]] = None,
    rename_source: Optional[str] = None,
    source_lease_id: Optional[str] = None,
    properties: Optional[str] = None,
    permissions: Optional[str] = None,
    umask: Optional[str] = None,
    path_http_headers: Optional["_models.PathHTTPHeaders"] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> None:
    """Create File | Create Directory | Rename File | Rename Directory.

    Create or rename a file or directory. By default, the destination is overwritten and if the
    destination already exists and has a lease the lease is broken. This operation supports
    conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob
    Service Operations <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-
    conditional-headers-for-blob-service-operations>`_. To fail if the destination already exists,
    use a conditional request with If-None-Match: "*".

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param resource: Required only for Create File and Create Directory. The value must be "file"
     or "directory".
    :type resource: str or ~azure.storage.filedatalake.models.PathResourceType
    :param continuation: Optional. When deleting a directory, the number of paths that are deleted
     with each invocation is limited. If the number of paths to be deleted exceeds this limit, a
     continuation token is returned in this response header. When a continuation token is returned
     in the response, it must be specified in a subsequent invocation of the delete operation to
     continue deleting the directory.
    :type continuation: str
    :param mode: Optional. Valid only when namespace is enabled. This parameter determines the
     behavior of the rename operation. The value must be "legacy" or "posix", and the default value
     will be "posix".
    :type mode: str or ~azure.storage.filedatalake.models.PathRenameMode
    :param rename_source: An optional file or directory to be renamed. The value must have the
     following format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties
     will overwrite the existing properties; otherwise, the existing properties will be preserved.
     This value must be a URL percent-encoded string. Note that the string may only contain ASCII
     characters in the ISO-8859-1 character set.
    :type rename_source: str
    :param source_lease_id: A lease ID for the source path. If specified, the source path must have
     an active lease and the lease ID must match.
    :type source_lease_id: str
    :param properties: Optional. User-defined properties to be stored with the filesystem, in the
     format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value
     is a base64 encoded string. Note that the string may only contain ASCII characters in the
     ISO-8859-1 character set. If the filesystem exists, any properties not included in the list
     will be removed. All properties are removed if the header is omitted. To merge new and
     existing properties, first get all existing properties and the current E-Tag, then make a
     conditional request with the E-Tag and include values for all properties.
    :type properties: str
    :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the
     account. Sets POSIX access permissions for the file owner, the file owning group, and others.
     Each class may be granted read, write, or execute permission. The sticky bit is also
     supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
    :type permissions: str
    :param umask: Optional and only valid if Hierarchical Namespace is enabled for the account.
     When creating a file or directory and the parent folder does not have a default ACL, the umask
     restricts the permissions of the file or directory to be created. The resulting permission is
     given by p bitwise and not u, where p is the permission and u is the umask. For example, if p
     is 0777 and u is 0057, then the resulting permission is 0720. The default permission is 0777
     for a directory and 0666 for a file. The default umask is 0027. The umask must be specified
     in 4-digit octal notation (e.g. 0766).
    :type umask: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :param source_modified_access_conditions: Parameter group.
    :type source_modified_access_conditions: ~azure.storage.filedatalake.models.SourceModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Optional hook letting the caller post-process the raw pipeline response.
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map the service's auth/not-found/conflict status codes to azure-core
    # exception types; callers may override or extend via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    # Flatten the optional parameter-group models into the individual values
    # that are serialized as request headers below.
    _cache_control = None
    _content_encoding = None
    _content_language = None
    _content_disposition = None
    _content_type = None
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    _source_if_match = None
    _source_if_none_match = None
    _source_if_modified_since = None
    _source_if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    if path_http_headers is not None:
        _cache_control = path_http_headers.cache_control
        _content_encoding = path_http_headers.content_encoding
        _content_language = path_http_headers.content_language
        _content_disposition = path_http_headers.content_disposition
        _content_type = path_http_headers.content_type
    if source_modified_access_conditions is not None:
        _source_if_match = source_modified_access_conditions.source_if_match
        _source_if_none_match = source_modified_access_conditions.source_if_none_match
        _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
        _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
    accept = "application/json"
    # Construct URL
    url = self.create.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if resource is not None:
        query_parameters['resource'] = self._serialize.query("resource", resource, 'str')
    if continuation is not None:
        query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
    if mode is not None:
        query_parameters['mode'] = self._serialize.query("mode", mode, 'str')
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if _cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
    if _content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
    if _content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
    if _content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
    if _content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
    if rename_source is not None:
        header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if source_lease_id is not None:
        header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str')
    if properties is not None:
        header_parameters['x-ms-properties'] = self._serialize.header("properties", properties, 'str')
    if permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
    if umask is not None:
        header_parameters['x-ms-umask'] = self._serialize.header("umask", umask, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    if _source_if_match is not None:
        header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
    if _source_if_none_match is not None:
        header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
    if _source_if_modified_since is not None:
        header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
    if _source_if_unmodified_since is not None:
        header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    # Create/rename is a bodyless PUT: every option travels as a query
    # parameter or header built above.
    request = self._client.put(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    # The service signals success with 201 Created only; anything else is an error.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)
    # Typed response headers are deserialized and handed to the optional
    # 'cls' callback; the operation itself returns None.
    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
    response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
    if cls:
        return cls(pipeline_response, None, response_headers)
create.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def update(
    self,
    action: Union[str, "_models.PathUpdateAction"],
    mode: Union[str, "_models.PathSetAccessControlRecursiveMode"],
    body: IO,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    max_records: Optional[int] = None,
    continuation: Optional[str] = None,
    force_flag: Optional[bool] = None,
    position: Optional[int] = None,
    retain_uncommitted_data: Optional[bool] = None,
    close: Optional[bool] = None,
    content_length: Optional[int] = None,
    properties: Optional[str] = None,
    owner: Optional[str] = None,
    group: Optional[str] = None,
    permissions: Optional[str] = None,
    acl: Optional[str] = None,
    path_http_headers: Optional["_models.PathHTTPHeaders"] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> Optional["_models.SetAccessControlRecursiveResponse"]:
    """Append Data | Flush Data | Set Properties | Set Access Control.

    Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file,
    sets properties for a file or directory, or sets access control for a file or directory. Data
    can only be appended to a file. Concurrent writes to the same file using multiple clients are
    not supported. This operation supports conditional HTTP requests. For more information, see
    `Specifying Conditional Headers for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param action: The action must be "append" to upload data to be appended to a file, "flush" to
     flush previously uploaded data to a file, "setProperties" to set the properties of a file or
     directory, "setAccessControl" to set the owner, group, permissions, or access control list for
     a file or directory, or "setAccessControlRecursive" to set the access control list for a
     directory recursively. Note that Hierarchical Namespace must be enabled for the account in
     order to use access control. Also note that the Access Control List (ACL) includes permissions
     for the owner, owning group, and others, so the x-ms-permissions and x-ms-acl request headers
     are mutually exclusive.
    :type action: str or ~azure.storage.filedatalake.models.PathUpdateAction
    :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify"
     modifies one or more POSIX access control rights that pre-exist on files and directories,
     "remove" removes one or more POSIX access control rights that were present earlier on files
     and directories.
    :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode
    :param body: Initial data.
    :type body: IO
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies the
     maximum number of files or directories on which the acl change will be applied. If omitted or
     greater than 2,000, the request will process up to 2,000 items.
    :type max_records: int
    :param continuation: Optional. The number of paths processed with each invocation is limited.
     If the number of paths to be processed exceeds this limit, a continuation token is returned in
     the response header x-ms-continuation. When a continuation token is returned in the response,
     it must be percent-encoded and specified in a subsequent invocation of setAcessControlRecursive
     operation.
    :type continuation: str
    :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false,
     the operation will terminate quickly on encountering user errors (4XX). If true, the operation
     will ignore user errors and proceed with the operation on other sub-entities of the directory.
     Continuation token will only be returned when forceFlag is true in case of user errors. If not
     set the default value is false for this.
    :type force_flag: bool
    :param position: This parameter allows the caller to upload data in parallel and control the
     order in which it is appended to the file. It is required when uploading data to be appended
     to the file and when flushing previously uploaded data to the file. The value must be the
     position where the data is to be appended. Uploaded data is not immediately flushed, or
     written, to the file. To flush, the previously uploaded data must be contiguous, the position
     parameter must be specified and equal to the length of the file after all data has been
     written, and there must not be a request entity body included with the request.
    :type position: long
    :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data
     is retained after the flush operation completes; otherwise, the uncommitted data is deleted
     after the flush operation. The default is false. Data at offsets less than the specified
     position are written to the file when flush succeeds, but this optional parameter allows data
     after the flush position to be retained for a future flush operation.
    :type retain_uncommitted_data: bool
    :param close: Azure Storage Events allow applications to receive notifications when files
     change. When Azure Storage Events are enabled, a file changed event is raised. This event has a
     property indicating whether this is the final change to distinguish the difference between an
     intermediate flush to a file stream and the final close of a file stream. The close query
     parameter is valid only when the action is "flush" and change notifications are enabled. If the
     value of close is "true" and the flush operation completes successfully, the service raises a
     file change notification with a property indicating that this is the final update (the file
     stream has been closed). If "false" a change notification is raised indicating the file has
     changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to
     indicate that the file stream has been closed.".
    :type close: bool
    :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
     Data". Must be the length of the request content in bytes for "Append Data".
    :type content_length: long
    :param properties: Optional. User-defined properties to be stored with the filesystem, in the
     format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value
     is a base64 encoded string. Note that the string may only contain ASCII characters in the
     ISO-8859-1 character set. If the filesystem exists, any properties not included in the list
     will be removed. All properties are removed if the header is omitted. To merge new and
     existing properties, first get all existing properties and the current E-Tag, then make a
     conditional request with the E-Tag and include values for all properties.
    :type properties: str
    :param owner: Optional. The owner of the blob or directory.
    :type owner: str
    :param group: Optional. The owning group of the blob or directory.
    :type group: str
    :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the
     account. Sets POSIX access permissions for the file owner, the file owning group, and others.
     Each class may be granted read, write, or execute permission. The sticky bit is also
     supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
    :type permissions: str
    :param acl: Sets POSIX access control rights on files and directories. The value is a comma-
     separated list of access control entries. Each access control entry (ACE) consists of a scope,
     a type, a user or group identifier, and permissions in the format
     "[scope:][type]:[id]:[permissions]".
    :type acl: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SetAccessControlRecursiveResponse, or the result of cls(response)
    :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse or None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Optional hook letting the caller post-process the raw pipeline response.
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.SetAccessControlRecursiveResponse"]]
    # Map the service's auth/not-found/conflict status codes to azure-core
    # exception types; callers may override or extend via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    # Flatten the optional parameter-group models into the individual values
    # that are serialized as request headers below.
    _content_md5 = None
    _lease_id = None
    _cache_control = None
    _content_type = None
    _content_disposition = None
    _content_encoding = None
    _content_language = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    if path_http_headers is not None:
        _content_md5 = path_http_headers.content_md5
        _cache_control = path_http_headers.cache_control
        _content_type = path_http_headers.content_type
        _content_disposition = path_http_headers.content_disposition
        _content_encoding = path_http_headers.content_encoding
        _content_language = path_http_headers.content_language
    # Request body media type; overridable through the 'content_type' kwarg.
    content_type = kwargs.pop("content_type", "application/octet-stream")
    accept = "application/json"
    # Construct URL
    url = self.update.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    # 'action' and 'mode' are required query parameters; the rest are optional.
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if max_records is not None:
        query_parameters['maxRecords'] = self._serialize.query("max_records", max_records, 'int', minimum=1)
    if continuation is not None:
        query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
    query_parameters['mode'] = self._serialize.query("mode", mode, 'str')
    if force_flag is not None:
        query_parameters['forceFlag'] = self._serialize.query("force_flag", force_flag, 'bool')
    if position is not None:
        query_parameters['position'] = self._serialize.query("position", position, 'long')
    if retain_uncommitted_data is not None:
        query_parameters['retainUncommittedData'] = self._serialize.query("retain_uncommitted_data", retain_uncommitted_data, 'bool')
    if close is not None:
        query_parameters['close'] = self._serialize.query("close", close, 'bool')
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if content_length is not None:
        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
    if _content_md5 is not None:
        header_parameters['x-ms-content-md5'] = self._serialize.header("content_md5", _content_md5, 'bytearray')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
    if _content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
    if _content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
    if _content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
    if _content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
    if properties is not None:
        header_parameters['x-ms-properties'] = self._serialize.header("properties", properties, 'str')
    if owner is not None:
        header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
    if group is not None:
        header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
    if permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
    if acl is not None:
        header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    # The payload (append data) is streamed as-is in a PATCH request body.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content_kwargs['stream_content'] = body
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)
    response_headers = {}
    deserialized = None
    # 200 carries full path/property headers plus a SetAccessControlRecursiveResponse
    # body; 202 (append/flush accepted) carries only a minimal header set.
    if response.status_code == 200:
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
        response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        deserialized = self._deserialize('SetAccessControlRecursiveResponse', pipeline_response)
    if response.status_code == 202:
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
    if cls:
        return cls(pipeline_response, deserialized, response_headers)
    return deserialized
update.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def lease(
    self,
    x_ms_lease_action: Union[str, "_models.PathLeaseAction"],
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    x_ms_lease_duration: Optional[int] = None,
    x_ms_lease_break_period: Optional[int] = None,
    proposed_lease_id: Optional[str] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> None:
    """Lease Path.

    Create and manage a lease to restrict write and delete access to the path. This operation
    supports conditional HTTP requests. For more information, see `Specifying Conditional Headers
    for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param x_ms_lease_action: There are five lease actions: "acquire", "break", "change", "renew",
     and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration"
     to acquire a new lease. Use "break" to break an existing lease. When a lease is broken, the
     lease break period is allowed to elapse, during which time no lease operation except break and
     release can be performed on the file. When a lease is successfully broken, the response
     indicates the interval in seconds until a new lease can be acquired. Use "change" and specify
     the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to
     change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an
     existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease.
    :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies
     the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or
     -1 for infinite lease.
    :type x_ms_lease_duration: int
    :param x_ms_lease_break_period: The lease break period duration is optional to break a lease,
     and specifies the break period of the lease in seconds. The lease break duration must be
     between 0 and 60 seconds.
    :type x_ms_lease_break_period: int
    :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns
     400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
     Constructor (String) for a list of valid GUID string formats.
    :type proposed_lease_id: str
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Caller-supplied error_map entries override the defaults below.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group objects into individual values.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL
    url = self.lease.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['x-ms-lease-action'] = self._serialize.header("x_ms_lease_action", x_ms_lease_action, 'str')
    if x_ms_lease_duration is not None:
        header_parameters['x-ms-lease-duration'] = self._serialize.header("x_ms_lease_duration", x_ms_lease_duration, 'int')
    if x_ms_lease_break_period is not None:
        header_parameters['x-ms-lease-break-period'] = self._serialize.header("x_ms_lease_break_period", x_ms_lease_break_period, 'int')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if proposed_lease_id is not None:
        header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Any status outside the documented success set is mapped to a typed error.
    if response.status_code not in [200, 201, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Each success status returns a slightly different header set; 202 carries
    # x-ms-lease-time instead of x-ms-lease-id (and no Date header here).
    response_headers = {}
    if response.status_code == 200:
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))

    if response.status_code == 201:
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))

    if response.status_code == 202:
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-lease-time']=self._deserialize('str', response.headers.get('x-ms-lease-time'))

    if cls:
        return cls(pipeline_response, None, response_headers)
lease.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def read(
    self,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    range: Optional[str] = None,
    x_ms_range_get_content_md5: Optional[bool] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> IO:
    """Read File.

    Read the contents of a file. For read operations, range requests are supported. This operation
    supports conditional HTTP requests. For more information, see `Specifying Conditional Headers
    for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param range: The HTTP Range request header specifies one or more byte ranges of the resource
     to be retrieved.
    :type range: str
    :param x_ms_range_get_content_md5: Optional. When this header is set to "true" and specified
     together with the Range header, the service returns the MD5 hash for the range, as long as the
     range is less than or equal to 4MB in size. If this header is specified without the Range
     header, the service returns status code 400 (Bad Request). If this header is set to true when
     the range exceeds 4 MB in size, the service returns status code 400 (Bad Request).
    :type x_ms_range_get_content_md5: bool
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IO, or the result of cls(response)
    :rtype: IO
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[IO]
    # Caller-supplied error_map entries override the defaults below.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group objects into individual values.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL
    url = self.read.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if range is not None:
        header_parameters['Range'] = self._serialize.header("range", range, 'str')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if x_ms_range_get_content_md5 is not None:
        header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("x_ms_range_get_content_md5", x_ms_range_get_content_md5, 'bool')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    # stream=True: the body is not buffered; the caller consumes the returned stream.
    pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs)
    response = pipeline_response.http_response

    # 200 = full content, 206 = partial content (range request).
    if response.status_code not in [200, 206]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    response_headers = {}
    if response.status_code == 200:
        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-resource-type']=self._deserialize('str', response.headers.get('x-ms-resource-type'))
        response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
        response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
        response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
        response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
        deserialized = response.stream_download(self._client._pipeline)

    if response.status_code == 206:
        # Same header set as 200 plus x-ms-content-md5 (MD5 of the whole blob
        # when only a range was returned).
        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['x-ms-content-md5']=self._deserialize('str', response.headers.get('x-ms-content-md5'))
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-resource-type']=self._deserialize('str', response.headers.get('x-ms-resource-type'))
        response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
        response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
        response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
        response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
        deserialized = response.stream_download(self._client._pipeline)

    if cls:
        return cls(pipeline_response, deserialized, response_headers)
    return deserialized
read.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def get_properties(
    self,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    action: Optional[Union[str, "_models.PathGetPropertiesAction"]] = None,
    upn: Optional[bool] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> None:
    """Get Properties | Get Status | Get Access Control List.

    Get Properties returns all system and user defined properties for a path. Get Status returns
    all system defined properties for a path. Get Access Control List returns the access control
    list for a path. This operation supports conditional HTTP requests. For more information, see
    `Specifying Conditional Headers for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param action: Optional. If the value is "getStatus" only the system defined properties for the
     path are returned. If the value is "getAccessControl" the access control list is returned in
     the response headers (Hierarchical Namespace must be enabled for the account), otherwise the
     properties are returned.
    :type action: str or ~azure.storage.filedatalake.models.PathGetPropertiesAction
    :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If
     "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response
     headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If
     "false", the values will be returned as Azure Active Directory Object IDs. The default value is
     false. Note that group and application Object IDs are not translated because they do not have
     unique friendly names.
    :type upn: bool
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Caller-supplied error_map entries override the defaults below.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group objects into individual values.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL
    url = self.get_properties.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if action is not None:
        query_parameters['action'] = self._serialize.query("action", action, 'str')
    if upn is not None:
        query_parameters['upn'] = self._serialize.query("upn", upn, 'bool')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # HEAD request: all results come back as response headers, no body.
    request = self._client.head(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    response_headers = {}
    response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
    response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
    response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
    response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
    response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
    response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
    response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
    response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
    response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['x-ms-resource-type']=self._deserialize('str', response.headers.get('x-ms-resource-type'))
    response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
    response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner'))
    response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group'))
    response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions'))
    response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl'))
    response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
    response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
    response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))

    if cls:
        return cls(pipeline_response, None, response_headers)
get_properties.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def delete(
    self,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    recursive: Optional[bool] = None,
    continuation: Optional[str] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> None:
    """Delete File | Delete Directory.

    Delete the file or directory. This operation supports conditional HTTP requests. For more
    information, see `Specifying Conditional Headers for Blob Service Operations
    <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-
    blob-service-operations>`_.

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param recursive: Required.
    :type recursive: bool
    :param continuation: Optional. When deleting a directory, the number of paths that are deleted
     with each invocation is limited. If the number of paths to be deleted exceeds this limit, a
     continuation token is returned in this response header. When a continuation token is returned
     in the response, it must be specified in a subsequent invocation of the delete operation to
     continue deleting the directory.
    :type continuation: str
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Default status -> exception mapping; callers may extend/override it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Unpack the optional parameter-group objects into plain locals.
    _lease_id = lease_access_conditions.lease_id if lease_access_conditions is not None else None
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    else:
        _if_match = _if_none_match = _if_modified_since = _if_unmodified_since = None
    accept = "application/json"

    # Build the request URL from the configured endpoint.
    url = self._client.format_url(
        self.delete.metadata['url'],  # type: ignore
        url=self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    )

    # Assemble query parameters; each is sent only when supplied.
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if recursive is not None:
        query_parameters['recursive'] = self._serialize.query("recursive", recursive, 'bool')
    if continuation is not None:
        query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')

    # Assemble request headers; conditional headers are table-driven.
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    for wire_name, param_name, value, data_type in (
        ('x-ms-lease-id', "lease_id", _lease_id, 'str'),
        ('If-Match', "if_match", _if_match, 'str'),
        ('If-None-Match', "if_none_match", _if_none_match, 'str'),
        ('If-Modified-Since', "if_modified_since", _if_modified_since, 'rfc-1123'),
        ('If-Unmodified-Since', "if_unmodified_since", _if_unmodified_since, 'rfc-1123'),
    ):
        if value is not None:
            header_parameters[wire_name] = self._serialize.header(param_name, value, data_type)
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 is the only documented success status for this operation.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the interesting response headers, table-driven.
    response_headers = {}
    for wire_name, data_type in (
        ('Date', 'rfc-1123'),
        ('x-ms-request-id', 'str'),
        ('x-ms-version', 'str'),
        ('x-ms-continuation', 'str'),
        ('x-ms-deletion-id', 'str'),
    ):
        response_headers[wire_name] = self._deserialize(data_type, response.headers.get(wire_name))

    if cls:
        return cls(pipeline_response, None, response_headers)
delete.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def set_access_control(
    self,
    timeout: Optional[int] = None,
    owner: Optional[str] = None,
    group: Optional[str] = None,
    permissions: Optional[str] = None,
    acl: Optional[str] = None,
    request_id_parameter: Optional[str] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> None:
    """Set the owner, group, permissions, or access control list for a path.

    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param owner: Optional. The owner of the blob or directory.
    :type owner: str
    :param group: Optional. The owning group of the blob or directory.
    :type group: str
    :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the
     account. Sets POSIX access permissions for the file owner, the file owning group, and others.
     Each class may be granted read, write, or execute permission. The sticky bit is also
     supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
    :type permissions: str
    :param acl: Sets POSIX access control rights on files and directories. The value is a comma-
     separated list of access control entries. Each access control entry (ACE) consists of a scope,
     a type, a user or group identifier, and permissions in the format
     "[scope:][type]:[id]:[permissions]".
    :type acl: str
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    # Caller-supplied error_map entries override the defaults above.
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # that are serialized into request headers below.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    # "setAccessControl" selects this sub-operation of the PATCH on the path resource.
    action = "setAccessControl"
    accept = "application/json"

    # Construct URL
    url = self.set_access_control.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers (optional values are only sent when provided)
    header_parameters = {}  # type: Dict[str, Any]
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if owner is not None:
        header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
    if group is not None:
        header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
    if permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
    if acl is not None:
        header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.patch(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only HTTP 200 is a success for this operation; anything else raises.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the response headers so a custom `cls` callback can receive them.
    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))

    if cls:
        return cls(pipeline_response, None, response_headers)

set_access_control.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def set_access_control_recursive(
    self,
    mode: Union[str, "_models.PathSetAccessControlRecursiveMode"],
    timeout: Optional[int] = None,
    continuation: Optional[str] = None,
    force_flag: Optional[bool] = None,
    max_records: Optional[int] = None,
    acl: Optional[str] = None,
    request_id_parameter: Optional[str] = None,
    **kwargs
) -> "_models.SetAccessControlRecursiveResponse":
    """Set the access control list for a path and subpaths.

    :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify"
     modifies one or more POSIX access control rights that pre-exist on files and directories,
     "remove" removes one or more POSIX access control rights that were present earlier on files
     and directories.
    :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode
    :param timeout: The timeout parameter is expressed in seconds.
    :type timeout: int
    :param continuation: Optional continuation token from a previous response; pass it back to
     resume an operation that was truncated by the per-call path limit.
    :type continuation: str
    :param force_flag: Optional. If false, the operation terminates quickly on user errors (4XX);
     if true, user errors are skipped and the operation continues on the remaining sub-entities.
     Defaults to false when unset.
    :type force_flag: bool
    :param max_records: Optional. Maximum number of files or directories the ACL change is
     applied to per call; the service caps this at 2,000.
    :type max_records: int
    :param acl: Comma-separated list of POSIX access control entries in the format
     "[scope:][type]:[id]:[permissions]".
    :type acl: str
    :param request_id_parameter: Client-generated, opaque value (1 KB limit) recorded in the
     analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SetAccessControlRecursiveResponse, or the result of cls(response)
    :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SetAccessControlRecursiveResponse"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    action = "setAccessControlRecursive"
    accept = "application/json"

    # Resolve the URL template against the configured account endpoint.
    path_args = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.set_access_control_recursive.metadata['url'], **path_args)  # type: ignore

    # Query string: `action` and `mode` are always sent, the rest only when supplied.
    query_params = {}  # type: Dict[str, Any]
    query_params['action'] = self._serialize.query("action", action, 'str')
    if timeout is not None:
        query_params['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if continuation is not None:
        query_params['continuation'] = self._serialize.query("continuation", continuation, 'str')
    query_params['mode'] = self._serialize.query("mode", mode, 'str')
    if force_flag is not None:
        query_params['forceFlag'] = self._serialize.query("force_flag", force_flag, 'bool')
    if max_records is not None:
        query_params['maxRecords'] = self._serialize.query("max_records", max_records, 'int', minimum=1)

    # Request headers.
    header_params = {}  # type: Dict[str, Any]
    if acl is not None:
        header_params['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
    if request_id_parameter is not None:
        header_params['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_params['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_params['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.patch(url, query_params, header_params)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything other than HTTP 200 is an error for this operation.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the interesting response headers for the optional `cls` callback.
    response_headers = {}
    for header_name, data_type in (
        ('Date', 'rfc-1123'),
        ('x-ms-client-request-id', 'str'),
        ('x-ms-continuation', 'str'),
        ('x-ms-request-id', 'str'),
        ('x-ms-version', 'str'),
    ):
        response_headers[header_name] = self._deserialize(data_type, response.headers.get(header_name))

    deserialized = self._deserialize('SetAccessControlRecursiveResponse', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, response_headers)
    return deserialized

set_access_control_recursive.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def flush_data(
    self,
    timeout: Optional[int] = None,
    position: Optional[int] = None,
    retain_uncommitted_data: Optional[bool] = None,
    close: Optional[bool] = None,
    content_length: Optional[int] = None,
    request_id_parameter: Optional[str] = None,
    path_http_headers: Optional["_models.PathHTTPHeaders"] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
    **kwargs
) -> None:
    """Flush previously uploaded (appended) data to the file.

    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param position: This parameter allows the caller to upload data in parallel and control the
     order in which it is appended to the file. It is required when uploading data to be appended
     to the file and when flushing previously uploaded data to the file. The value must be the
     position where the data is to be appended. Uploaded data is not immediately flushed, or
     written, to the file. To flush, the previously uploaded data must be contiguous, the position
     parameter must be specified and equal to the length of the file after all data has been
     written, and there must not be a request entity body included with the request.
    :type position: long
    :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data
     is retained after the flush operation completes; otherwise, the uncommitted data is deleted
     after the flush operation. The default is false. Data at offsets less than the specified
     position are written to the file when flush succeeds, but this optional parameter allows data
     after the flush position to be retained for a future flush operation.
    :type retain_uncommitted_data: bool
    :param close: Azure Storage Events allow applications to receive notifications when files
     change. When Azure Storage Events are enabled, a file changed event is raised. This event has a
     property indicating whether this is the final change to distinguish the difference between an
     intermediate flush to a file stream and the final close of a file stream. The close query
     parameter is valid only when the action is "flush" and change notifications are enabled. If the
     value of close is "true" and the flush operation completes successfully, the service raises a
     file change notification with a property indicating that this is the final update (the file
     stream has been closed). If "false" a change notification is raised indicating the file has
     changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to
     indicate that the file stream has been closed.".
    :type close: bool
    :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
     Data". Must be the length of the request content in bytes for "Append Data".
    :type content_length: long
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    # Caller-supplied error_map entries override the defaults above.
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # that are serialized into request headers below.
    _content_md5 = None
    _lease_id = None
    _cache_control = None
    _content_type = None
    _content_disposition = None
    _content_encoding = None
    _content_language = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    if path_http_headers is not None:
        _content_md5 = path_http_headers.content_md5
        _cache_control = path_http_headers.cache_control
        _content_type = path_http_headers.content_type
        _content_disposition = path_http_headers.content_disposition
        _content_encoding = path_http_headers.content_encoding
        _content_language = path_http_headers.content_language
    # "flush" selects this sub-operation of the PATCH on the path resource.
    action = "flush"
    accept = "application/json"

    # Construct URL
    url = self.flush_data.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if position is not None:
        query_parameters['position'] = self._serialize.query("position", position, 'long')
    if retain_uncommitted_data is not None:
        query_parameters['retainUncommittedData'] = self._serialize.query("retain_uncommitted_data", retain_uncommitted_data, 'bool')
    if close is not None:
        query_parameters['close'] = self._serialize.query("close", close, 'bool')

    # Construct headers (optional values are only sent when provided)
    header_parameters = {}  # type: Dict[str, Any]
    if content_length is not None:
        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
    if _content_md5 is not None:
        header_parameters['x-ms-content-md5'] = self._serialize.header("content_md5", _content_md5, 'bytearray')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
    if _content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
    if _content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
    if _content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
    if _content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.patch(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only HTTP 200 is a success for this operation; anything else raises.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the response headers so a custom `cls` callback can receive them.
    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
    response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))

    if cls:
        return cls(pipeline_response, None, response_headers)

flush_data.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def append_data(
    self,
    body: IO,
    position: Optional[int] = None,
    timeout: Optional[int] = None,
    content_length: Optional[int] = None,
    transactional_content_crc64: Optional[bytearray] = None,
    request_id_parameter: Optional[str] = None,
    path_http_headers: Optional["_models.PathHTTPHeaders"] = None,
    lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
    **kwargs
) -> None:
    """Append data to the file.

    :param body: Initial data.
    :type body: IO
    :param position: This parameter allows the caller to upload data in parallel and control the
     order in which it is appended to the file. It is required when uploading data to be appended
     to the file and when flushing previously uploaded data to the file. The value must be the
     position where the data is to be appended. Uploaded data is not immediately flushed, or
     written, to the file. To flush, the previously uploaded data must be contiguous, the position
     parameter must be specified and equal to the length of the file after all data has been
     written, and there must not be a request entity body included with the request.
    :type position: long
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
     Data". Must be the length of the request content in bytes for "Append Data".
    :type content_length: long
    :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
     validated by the service.
    :type transactional_content_crc64: bytearray
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    # Caller-supplied error_map entries override the defaults above.
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # that are serialized into request headers below.
    _transactional_content_hash = None
    _lease_id = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if path_http_headers is not None:
        _transactional_content_hash = path_http_headers.transactional_content_hash
    # "append" selects this sub-operation of the PATCH on the path resource.
    action = "append"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.append_data.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if position is not None:
        query_parameters['position'] = self._serialize.query("position", position, 'long')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers (optional values are only sent when provided)
    header_parameters = {}  # type: Dict[str, Any]
    if content_length is not None:
        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
    if _transactional_content_hash is not None:
        header_parameters['Content-MD5'] = self._serialize.header("transactional_content_hash", _transactional_content_hash, 'bytearray')
    if transactional_content_crc64 is not None:
        header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # The request body is passed through as a stream rather than serialized.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content_kwargs['stream_content'] = body
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Append returns HTTP 202 Accepted on success; anything else raises.
    if response.status_code not in [202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the response headers so a custom `cls` callback can receive them.
    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
    response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
    response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))

    if cls:
        return cls(pipeline_response, None, response_headers)

append_data.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def set_expiry(
    self,
    expiry_options: Union[str, "_models.PathExpiryOptions"],
    timeout: Optional[int] = None,
    request_id_parameter: Optional[str] = None,
    expires_on: Optional[str] = None,
    **kwargs
) -> None:
    """Sets the time a blob will expire and be deleted.

    :param expiry_options: Required. Indicates mode of the expiry time.
    :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions
    :param timeout: The timeout parameter is expressed in seconds.
    :type timeout: int
    :param request_id_parameter: Client-generated, opaque value (1 KB limit) recorded in the
     analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param expires_on: The time to set the blob to expiry.
    :type expires_on: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    comp = "expiry"
    accept = "application/json"

    # Resolve the URL template against the configured account endpoint.
    path_args = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.set_expiry.metadata['url'], **path_args)  # type: ignore

    # Query string.
    query_params = {}  # type: Dict[str, Any]
    query_params['comp'] = self._serialize.query("comp", comp, 'str')
    if timeout is not None:
        query_params['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Request headers; expiry option is mandatory, expiry time only when given.
    header_params = {}  # type: Dict[str, Any]
    header_params['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if request_id_parameter is not None:
        header_params['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_params['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str')
    if expires_on is not None:
        header_params['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str')
    header_params['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.put(url, query_params, header_params)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything other than HTTP 200 is an error for this operation.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the interesting response headers for the optional `cls` callback.
    response_headers = {}
    for header_name, data_type in (
        ('ETag', 'str'),
        ('Last-Modified', 'rfc-1123'),
        ('x-ms-client-request-id', 'str'),
        ('x-ms-request-id', 'str'),
        ('x-ms-version', 'str'),
        ('Date', 'rfc-1123'),
    ):
        response_headers[header_name] = self._deserialize(data_type, response.headers.get(header_name))

    if cls:
        return cls(pipeline_response, None, response_headers)

set_expiry.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
async def undelete(
    self,
    timeout: Optional[int] = None,
    undelete_source: Optional[str] = None,
    request_id_parameter: Optional[str] = None,
    **kwargs
) -> None:
    """Undelete a path that was previously soft deleted.

    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path of
     the soft deleted blob to undelete.
    :type undelete_source: str
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    comp = "undelete"
    accept = "application/json"

    # Resolve the URL template attached to this operation's metadata.
    url = self.undelete.metadata['url']  # type: ignore
    url = self._client.format_url(
        url,
        url=self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    )

    # Query string: the fixed 'comp' selector plus the optional timeout.
    query_parameters = {
        'comp': self._serialize.query("comp", comp, 'str'),
    }  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Request headers; optional ones are added only when supplied.
    header_parameters = {}  # type: Dict[str, Any]
    if undelete_source is not None:
        header_parameters['x-ms-undelete-source'] = self._serialize.header("undelete_source", undelete_source, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.put(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything other than 200 is mapped to the appropriate azure-core error.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize the service response headers that callers may inspect via cls().
    response_headers = {
        header_name: self._deserialize(value_type, response.headers.get(header_name))
        for header_name, value_type in (
            ('x-ms-client-request-id', 'str'),
            ('x-ms-request-id', 'str'),
            ('x-ms-resource-type', 'str'),
            ('x-ms-version', 'str'),
            ('Date', 'rfc-1123'),
        )
    }
    if cls:
        return cls(pipeline_response, None, response_headers)
undelete.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
| 63.702931
| 160
| 0.691051
| 13,962
| 113,009
| 5.401089
| 0.042329
| 0.06405
| 0.018499
| 0.03896
| 0.898647
| 0.878597
| 0.862485
| 0.847951
| 0.831269
| 0.821721
| 0
| 0.006155
| 0.207824
| 113,009
| 1,773
| 161
| 63.738861
| 0.836199
| 0.024502
| 0
| 0.830084
| 0
| 0
| 0.159786
| 0.031316
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000929
| false
| 0
| 0.0065
| 0
| 0.023213
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ced9f0128f9f6a28d0dc662655efe317ad51a48
| 30,621
|
py
|
Python
|
sdk/python/pulumi_azure/sentinel/automation_rule.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/sentinel/automation_rule.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/sentinel/automation_rule.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['AutomationRuleArgs', 'AutomationRule']
@pulumi.input_type
class AutomationRuleArgs:
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen). The
    # @pulumi.input_type decorator introspects this class's properties, so the
    # getter/setter structure below must stay as generated.
    def __init__(__self__, *,
                 display_name: pulumi.Input[str],
                 log_analytics_workspace_id: pulumi.Input[str],
                 order: pulumi.Input[int],
                 action_incidents: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]]] = None,
                 action_playbooks: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 expiration: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a AutomationRule resource.
        :param pulumi.Input[str] display_name: The display name which should be used for this Sentinel Automation Rule.
        :param pulumi.Input[str] log_analytics_workspace_id: The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[int] order: The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        :param pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]] action_incidents: One or more `action_incident` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]] action_playbooks: One or more `action_playbook` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]] conditions: One or more `condition` blocks as defined below.
        :param pulumi.Input[bool] enabled: Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        :param pulumi.Input[str] expiration: The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        :param pulumi.Input[str] name: The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        """
        # Required arguments are always set; optional ones only when supplied,
        # so unset properties stay absent rather than explicitly None.
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "log_analytics_workspace_id", log_analytics_workspace_id)
        pulumi.set(__self__, "order", order)
        if action_incidents is not None:
            pulumi.set(__self__, "action_incidents", action_incidents)
        if action_playbooks is not None:
            pulumi.set(__self__, "action_playbooks", action_playbooks)
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if expiration is not None:
            pulumi.set(__self__, "expiration", expiration)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        The display name which should be used for this Sentinel Automation Rule.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="logAnalyticsWorkspaceId")
    def log_analytics_workspace_id(self) -> pulumi.Input[str]:
        """
        The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        """
        return pulumi.get(self, "log_analytics_workspace_id")

    @log_analytics_workspace_id.setter
    def log_analytics_workspace_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "log_analytics_workspace_id", value)

    @property
    @pulumi.getter
    def order(self) -> pulumi.Input[int]:
        """
        The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        """
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: pulumi.Input[int]):
        pulumi.set(self, "order", value)

    @property
    @pulumi.getter(name="actionIncidents")
    def action_incidents(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]]]:
        """
        One or more `action_incident` blocks as defined below.
        """
        return pulumi.get(self, "action_incidents")

    @action_incidents.setter
    def action_incidents(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]]]):
        pulumi.set(self, "action_incidents", value)

    @property
    @pulumi.getter(name="actionPlaybooks")
    def action_playbooks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]]]:
        """
        One or more `action_playbook` blocks as defined below.
        """
        return pulumi.get(self, "action_playbooks")

    @action_playbooks.setter
    def action_playbooks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]]]):
        pulumi.set(self, "action_playbooks", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]]]:
        """
        One or more `condition` blocks as defined below.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter
    def expiration(self) -> Optional[pulumi.Input[str]]:
        """
        The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        """
        return pulumi.get(self, "expiration")

    @expiration.setter
    def expiration(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expiration", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _AutomationRuleState:
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen). Unlike
    # AutomationRuleArgs, every field here is optional because this type is
    # used to look up / filter existing resources, not to create one.
    def __init__(__self__, *,
                 action_incidents: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]]] = None,
                 action_playbooks: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 expiration: Optional[pulumi.Input[str]] = None,
                 log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering AutomationRule resources.
        :param pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]] action_incidents: One or more `action_incident` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]] action_playbooks: One or more `action_playbook` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]] conditions: One or more `condition` blocks as defined below.
        :param pulumi.Input[str] display_name: The display name which should be used for this Sentinel Automation Rule.
        :param pulumi.Input[bool] enabled: Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        :param pulumi.Input[str] expiration: The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        :param pulumi.Input[str] log_analytics_workspace_id: The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[str] name: The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[int] order: The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        """
        # Only set supplied values so that unset fields remain absent.
        if action_incidents is not None:
            pulumi.set(__self__, "action_incidents", action_incidents)
        if action_playbooks is not None:
            pulumi.set(__self__, "action_playbooks", action_playbooks)
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if expiration is not None:
            pulumi.set(__self__, "expiration", expiration)
        if log_analytics_workspace_id is not None:
            pulumi.set(__self__, "log_analytics_workspace_id", log_analytics_workspace_id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if order is not None:
            pulumi.set(__self__, "order", order)

    @property
    @pulumi.getter(name="actionIncidents")
    def action_incidents(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]]]:
        """
        One or more `action_incident` blocks as defined below.
        """
        return pulumi.get(self, "action_incidents")

    @action_incidents.setter
    def action_incidents(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionIncidentArgs']]]]):
        pulumi.set(self, "action_incidents", value)

    @property
    @pulumi.getter(name="actionPlaybooks")
    def action_playbooks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]]]:
        """
        One or more `action_playbook` blocks as defined below.
        """
        return pulumi.get(self, "action_playbooks")

    @action_playbooks.setter
    def action_playbooks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleActionPlaybookArgs']]]]):
        pulumi.set(self, "action_playbooks", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]]]:
        """
        One or more `condition` blocks as defined below.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutomationRuleConditionArgs']]]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The display name which should be used for this Sentinel Automation Rule.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter
    def expiration(self) -> Optional[pulumi.Input[str]]:
        """
        The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        """
        return pulumi.get(self, "expiration")

    @expiration.setter
    def expiration(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expiration", value)

    @property
    @pulumi.getter(name="logAnalyticsWorkspaceId")
    def log_analytics_workspace_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        """
        return pulumi.get(self, "log_analytics_workspace_id")

    @log_analytics_workspace_id.setter
    def log_analytics_workspace_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "log_analytics_workspace_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """
        The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        """
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
class AutomationRule(pulumi.CustomResource):
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen). The two
    # @overload __init__ signatures are typing-only; the real entry point is
    # the third __init__, which dispatches to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 action_incidents: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionIncidentArgs']]]]] = None,
                 action_playbooks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionPlaybookArgs']]]]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleConditionArgs']]]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 expiration: Optional[pulumi.Input[str]] = None,
                 log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 order: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Manages a Sentinel Automation Rule.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="west europe")
        example_analytics_workspace = azure.operationalinsights.AnalyticsWorkspace("exampleAnalyticsWorkspace",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="pergb2018")
        sentinel = azure.operationalinsights.AnalyticsSolution("sentinel",
            solution_name="SecurityInsights",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            workspace_resource_id=example_analytics_workspace.id,
            workspace_name=example_analytics_workspace.name,
            plan=azure.operationalinsights.AnalyticsSolutionPlanArgs(
                publisher="Microsoft",
                product="OMSGallery/SecurityInsights",
            ))
        example_automation_rule = azure.sentinel.AutomationRule("exampleAutomationRule",
            log_analytics_workspace_id=sentinel.workspace_resource_id,
            display_name="automation_rule1",
            order=1,
            action_incidents=[azure.sentinel.AutomationRuleActionIncidentArgs(
                order=1,
                status="Active",
            )])
        ```

        ## Import

        Sentinel Automation Rules can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:sentinel/automationRule:AutomationRule example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/providers/Microsoft.SecurityInsights/AutomationRules/rule1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionIncidentArgs']]]] action_incidents: One or more `action_incident` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionPlaybookArgs']]]] action_playbooks: One or more `action_playbook` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleConditionArgs']]]] conditions: One or more `condition` blocks as defined below.
        :param pulumi.Input[str] display_name: The display name which should be used for this Sentinel Automation Rule.
        :param pulumi.Input[bool] enabled: Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        :param pulumi.Input[str] expiration: The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        :param pulumi.Input[str] log_analytics_workspace_id: The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[str] name: The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[int] order: The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AutomationRuleArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Sentinel Automation Rule.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="west europe")
        example_analytics_workspace = azure.operationalinsights.AnalyticsWorkspace("exampleAnalyticsWorkspace",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="pergb2018")
        sentinel = azure.operationalinsights.AnalyticsSolution("sentinel",
            solution_name="SecurityInsights",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            workspace_resource_id=example_analytics_workspace.id,
            workspace_name=example_analytics_workspace.name,
            plan=azure.operationalinsights.AnalyticsSolutionPlanArgs(
                publisher="Microsoft",
                product="OMSGallery/SecurityInsights",
            ))
        example_automation_rule = azure.sentinel.AutomationRule("exampleAutomationRule",
            log_analytics_workspace_id=sentinel.workspace_resource_id,
            display_name="automation_rule1",
            order=1,
            action_incidents=[azure.sentinel.AutomationRuleActionIncidentArgs(
                order=1,
                status="Active",
            )])
        ```

        ## Import

        Sentinel Automation Rules can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:sentinel/automationRule:AutomationRule example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/providers/Microsoft.SecurityInsights/AutomationRules/rule1
        ```

        :param str resource_name: The name of the resource.
        :param AutomationRuleArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: decides whether the caller used the
        # (resource_name, args=AutomationRuleArgs, opts=...) form or the
        # keyword-argument form, then forwards to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(AutomationRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 action_incidents: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionIncidentArgs']]]]] = None,
                 action_playbooks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionPlaybookArgs']]]]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleConditionArgs']]]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 expiration: Optional[pulumi.Input[str]] = None,
                 log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 order: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ must not be supplied and the
            # required properties must be present (unless resolving by URN).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AutomationRuleArgs.__new__(AutomationRuleArgs)

            __props__.__dict__["action_incidents"] = action_incidents
            __props__.__dict__["action_playbooks"] = action_playbooks
            __props__.__dict__["conditions"] = conditions
            if display_name is None and not opts.urn:
                raise TypeError("Missing required property 'display_name'")
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["enabled"] = enabled
            __props__.__dict__["expiration"] = expiration
            if log_analytics_workspace_id is None and not opts.urn:
                raise TypeError("Missing required property 'log_analytics_workspace_id'")
            __props__.__dict__["log_analytics_workspace_id"] = log_analytics_workspace_id
            __props__.__dict__["name"] = name
            if order is None and not opts.urn:
                raise TypeError("Missing required property 'order'")
            __props__.__dict__["order"] = order
        # Alias keeps stacks created under the earlier (misspelled) type token
        # "authomationRule" working — NOTE(review): intentional, do not "fix".
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure:sentinel/authomationRule:AuthomationRule")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(AutomationRule, __self__).__init__(
            'azure:sentinel/automationRule:AutomationRule',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            action_incidents: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionIncidentArgs']]]]] = None,
            action_playbooks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionPlaybookArgs']]]]] = None,
            conditions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleConditionArgs']]]]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            enabled: Optional[pulumi.Input[bool]] = None,
            expiration: Optional[pulumi.Input[str]] = None,
            log_analytics_workspace_id: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            order: Optional[pulumi.Input[int]] = None) -> 'AutomationRule':
        """
        Get an existing AutomationRule resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionIncidentArgs']]]] action_incidents: One or more `action_incident` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleActionPlaybookArgs']]]] action_playbooks: One or more `action_playbook` blocks as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AutomationRuleConditionArgs']]]] conditions: One or more `condition` blocks as defined below.
        :param pulumi.Input[str] display_name: The display name which should be used for this Sentinel Automation Rule.
        :param pulumi.Input[bool] enabled: Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        :param pulumi.Input[str] expiration: The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        :param pulumi.Input[str] log_analytics_workspace_id: The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[str] name: The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        :param pulumi.Input[int] order: The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _AutomationRuleState.__new__(_AutomationRuleState)

        __props__.__dict__["action_incidents"] = action_incidents
        __props__.__dict__["action_playbooks"] = action_playbooks
        __props__.__dict__["conditions"] = conditions
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["enabled"] = enabled
        __props__.__dict__["expiration"] = expiration
        __props__.__dict__["log_analytics_workspace_id"] = log_analytics_workspace_id
        __props__.__dict__["name"] = name
        __props__.__dict__["order"] = order
        return AutomationRule(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="actionIncidents")
    def action_incidents(self) -> pulumi.Output[Optional[Sequence['outputs.AutomationRuleActionIncident']]]:
        """
        One or more `action_incident` blocks as defined below.
        """
        return pulumi.get(self, "action_incidents")

    @property
    @pulumi.getter(name="actionPlaybooks")
    def action_playbooks(self) -> pulumi.Output[Optional[Sequence['outputs.AutomationRuleActionPlaybook']]]:
        """
        One or more `action_playbook` blocks as defined below.
        """
        return pulumi.get(self, "action_playbooks")

    @property
    @pulumi.getter
    def conditions(self) -> pulumi.Output[Optional[Sequence['outputs.AutomationRuleCondition']]]:
        """
        One or more `condition` blocks as defined below.
        """
        return pulumi.get(self, "conditions")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        The display name which should be used for this Sentinel Automation Rule.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether this Sentinel Automation Rule is enabled? Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def expiration(self) -> pulumi.Output[Optional[str]]:
        """
        The time in RFC3339 format of kind `UTC` that determines when this Automation Rule should expire and be disabled.
        """
        return pulumi.get(self, "expiration")

    @property
    @pulumi.getter(name="logAnalyticsWorkspaceId")
    def log_analytics_workspace_id(self) -> pulumi.Output[str]:
        """
        The ID of the Log Analytics Workspace where this Sentinel applies to. Changing this forces a new Sentinel Automation Rule to be created.
        """
        return pulumi.get(self, "log_analytics_workspace_id")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The UUID which should be used for this Sentinel Automation Rule. Changing this forces a new Sentinel Automation Rule to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def order(self) -> pulumi.Output[int]:
        """
        The order of this Sentinel Automation Rule. Possible values varies between `1` and `1000`.
        """
        return pulumi.get(self, "order")
| 50.280788
| 272
| 0.679207
| 3,370
| 30,621
| 5.981899
| 0.070326
| 0.087306
| 0.067861
| 0.048365
| 0.903219
| 0.889231
| 0.872265
| 0.858078
| 0.849397
| 0.841312
| 0
| 0.006222
| 0.223213
| 30,621
| 608
| 273
| 50.363487
| 0.841293
| 0.382385
| 0
| 0.756923
| 1
| 0
| 0.143948
| 0.077702
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0.003077
| 0.021538
| 0
| 0.276923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7cf29220101d277d411cba01928ae07251b454c3
| 193,879
|
py
|
Python
|
jdcloud_cli/controllers/services/rds.py
|
oulinbao/jdcloud-cli
|
ae9998829d5b1a79c69260940f36f645f009afa7
|
[
"Apache-2.0"
] | 95
|
2018-06-05T10:49:32.000Z
|
2019-12-31T11:07:36.000Z
|
jdcloud_cli/controllers/services/rds.py
|
oulinbao/jdcloud-cli
|
ae9998829d5b1a79c69260940f36f645f009afa7
|
[
"Apache-2.0"
] | 22
|
2018-06-05T10:58:59.000Z
|
2020-07-31T12:13:19.000Z
|
jdcloud_cli/controllers/services/rds.py
|
oulinbao/jdcloud-cli
|
ae9998829d5b1a79c69260940f36f645f009afa7
|
[
"Apache-2.0"
] | 21
|
2018-06-04T12:50:27.000Z
|
2020-11-05T10:55:28.000Z
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from argparse import RawTextHelpFormatter
from jdcloud_cli.cement.ext.ext_argparse import expose
from jdcloud_cli.controllers.base_controller import BaseController
from jdcloud_cli.client_factory import ClientFactory
from jdcloud_cli.parameter_builder import collect_user_args, collect_user_headers
from jdcloud_cli.printer import Printer
from jdcloud_cli.skeleton import Skeleton
class RdsController(BaseController):
class Meta:
    # Cement controller metadata: registers this controller as the `rds`
    # sub-command of the CLI.
    label = 'rds'
    # Short help line shown in the command list ("RDS cloud database").
    help = '云数据库RDS'
    # Long description printed for `jdc rds --help`; lists supported engines
    # and links to the OpenAPI reference.
    description = '''
        rds cli 子命令,目前RDS OpenAPI支持云数据库 MySQL、Percona、MariaDB、SQL Server、PostgreSQL。
        OpenAPI文档地址为:https://docs.jdcloud.com/cn/rds/api/overview
        '''
    # Nest this controller under the base controller (cement stacking).
    stacked_on = 'base'
    stacked_type = 'nested'
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为100,取值范围:[10,100],用于查询列表的接口 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看某个RDS实例下所有账号信息,包括账号名称、对各个数据库的访问权限信息等 ''',
description='''
查看某个RDS实例下所有账号信息,包括账号名称、对各个数据库的访问权限信息等。
示例: jdc rds describe-accounts --instance-id xxx
''',
)
def describe_accounts(self):
    """Query every database account of an RDS instance and print the result."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DescribeAccountsRequest import DescribeAccountsRequest
        request = DescribeAccountsRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个RDS实例中,账号名不能重复。账号名的具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='accountName', required=True)),
(['--account-password'], dict(help="""(string) 密码,密码的具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='accountPassword', required=True)),
(['--notes'], dict(help="""(string) 备注信息,仅支持PostgreSQL """, dest='notes', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 创建数据库账号,用户可以使用客户端,应用程序等通过该账号和密码登录RDS数据库实例。<br>为便于管理和恢复,RDS对账号进行了限制,数据库账号只能通过控制台或者OpenAPI进行创建、删除账号以及对账号授权等,用户不能通过SQL语句对账号进行相关操作。 ''',
description='''
创建数据库账号,用户可以使用客户端,应用程序等通过该账号和密码登录RDS数据库实例。<br>为便于管理和恢复,RDS对账号进行了限制,数据库账号只能通过控制台或者OpenAPI进行创建、删除账号以及对账号授权等,用户不能通过SQL语句对账号进行相关操作。。
示例: jdc rds create-account --instance-id xxx --account-name xxx --account-password xxx
''',
)
def create_account(self):
    """Create a database account on an RDS instance and print the API response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.CreateAccountRequest import CreateAccountRequest
        request = CreateAccountRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个实例中账号名不能重复 """, dest='accountName', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看RDS实例的账号的权限信息 - 仅支持 MySQL,Percona,MariaDB ''',
description='''
查看RDS实例的账号的权限信息 - 仅支持 MySQL,Percona,MariaDB。
示例: jdc rds describe-account-privilege --instance-id xxx --account-name xxx
''',
)
def describe_account_privilege(self):
    """Query an account's privileges (MySQL/Percona/MariaDB only) and print them."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DescribeAccountPrivilegeRequest import DescribeAccountPrivilegeRequest
        request = DescribeAccountPrivilegeRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个实例中账号名不能重复 """, dest='accountName', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 删除数据库账号,账号删除后不可恢复,用户无法再使用该账号登录RDS实例 ''',
description='''
删除数据库账号,账号删除后不可恢复,用户无法再使用该账号登录RDS实例。
示例: jdc rds delete-account --instance-id xxx --account-name xxx
''',
)
def delete_account(self):
    """Delete a database account from an RDS instance and print the API response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DeleteAccountRequest import DeleteAccountRequest
        request = DeleteAccountRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个实例中账号名不能重复 """, dest='accountName', required=True)),
(['--account-privileges'], dict(help="""(array: accountPrivilege) 账号的访问权限 """, dest='accountPrivileges', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 授予账号的数据库访问权限,即该账号对数据库拥有什么权限。一个账号可以对多个数据库具有访问权限。<br>为便于管理,RDS对权限进行了归类,目前提供以下两种权限<br>- ro:只读权限,用户只能读取数据库中的数据,不能进行创建、插入、删除、更改等操作。<br>- rw:读写权限,用户可以对数据库进行增删改查等操作 ''',
description='''
授予账号的数据库访问权限,即该账号对数据库拥有什么权限。一个账号可以对多个数据库具有访问权限。<br>为便于管理,RDS对权限进行了归类,目前提供以下两种权限<br>- ro:只读权限,用户只能读取数据库中的数据,不能进行创建、插入、删除、更改等操作。<br>- rw:读写权限,用户可以对数据库进行增删改查等操作。
示例: jdc rds grant-privilege --instance-id xxx --account-name xxx --account-privileges ['{"":""}']
''',
)
def grant_privilege(self):
    """Grant database access privileges (ro/rw) to an account and print the response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.GrantPrivilegeRequest import GrantPrivilegeRequest
        request = GrantPrivilegeRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个实例中账号名不能重复 """, dest='accountName', required=True)),
(['--db-names'], dict(help="""(array: string) 需要取消授权的数据库的名称。权限取消后,该账号将不能访问此数据库 """, dest='dbNames', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 取消该账号对某个数据库的所有权限。权限取消后,该账号将不能访问此数据库。取消账号对某个数据库的访问权限,不影响该账号对其他数据库的访问权限 ''',
description='''
取消该账号对某个数据库的所有权限。权限取消后,该账号将不能访问此数据库。取消账号对某个数据库的访问权限,不影响该账号对其他数据库的访问权限。
示例: jdc rds revoke-privilege --instance-id xxx --account-name xxx
''',
)
def revoke_privilege(self):
    """Revoke an account's privileges on the given databases and print the response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.RevokePrivilegeRequest import RevokePrivilegeRequest
        request = RevokePrivilegeRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个实例中账号名不能重复 """, dest='accountName', required=True)),
(['--account-password'], dict(help="""(string) 新密码,密码的具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='accountPassword', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 重置数据库账号密码。如果用户忘记账号的密码,可以使用该接口重置指定账号密码。密码重置后,以前的密码将无法使用,必须使用重置后的新密码登录或连接数据库实例。 ''',
description='''
重置数据库账号密码。如果用户忘记账号的密码,可以使用该接口重置指定账号密码。密码重置后,以前的密码将无法使用,必须使用重置后的新密码登录或连接数据库实例。。
示例: jdc rds reset-password --instance-id xxx --account-name xxx --account-password xxx
''',
)
def reset_password(self):
    """Reset a database account's password and print the API response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.ResetPasswordRequest import ResetPasswordRequest
        request = ResetPasswordRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个RDS实例中,账号名不能重复。账号名的具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='accountName', required=True)),
(['--account-password'], dict(help="""(string) 密码,密码的具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='accountPassword', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 创建数据库账号,用户可以使用客户端,应用程序等通过该账号和密码登录RDS数据库实例。<br>为便于管理和恢复,RDS对账号进行了限制,数据库账号只能通过控制台或者OpenAPI进行创建、删除账号以及对账号授权等,用户不能通过SQL语句对账号进行相关操作。 ''',
description='''
创建数据库账号,用户可以使用客户端,应用程序等通过该账号和密码登录RDS数据库实例。<br>为便于管理和恢复,RDS对账号进行了限制,数据库账号只能通过控制台或者OpenAPI进行创建、删除账号以及对账号授权等,用户不能通过SQL语句对账号进行相关操作。。
示例: jdc rds create-super-account --instance-id xxx --account-name xxx --account-password xxx
''',
)
def create_super_account(self):
    """Create a privileged (super) database account and print the API response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.CreateSuperAccountRequest import CreateSuperAccountRequest
        request = CreateSuperAccountRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--account-name'], dict(help="""(string) 账号名,在同一个实例中账号名不能重复 """, dest='accountName', required=True)),
(['--database-privileges'], dict(help="""(array: array) 设置数据库细粒度权限内容 """, dest='databasePrivileges', required=False)),
(['--global-privileges'], dict(help="""(array) 设置全局权限,权限的具体定义参见[枚举参数定义](../Enum-Definitions/Enum-Definitions.md) """, dest='globalPrivileges', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 授予账号的数据库细粒度的访问权限 - 仅支持 MySQL,Percona,MariaDB ''',
description='''
授予账号的数据库细粒度的访问权限 - 仅支持 MySQL,Percona,MariaDB。
示例: jdc rds grant-account-privilege --instance-id xxx --account-name xxx
''',
)
def grant_account_privilege(self):
    """Grant fine-grained database/global privileges to an account (MySQL family)."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.GrantAccountPrivilegeRequest import GrantAccountPrivilegeRequest
        request = GrantAccountPrivilegeRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看当前实例已开启的审计选项。如当前实例未开启审计,则返回空<br>- 仅支持SQL Server ''',
description='''
查看当前实例已开启的审计选项。如当前实例未开启审计,则返回空<br>- 仅支持SQL Server。
示例: jdc rds describe-audit --instance-id xxx
''',
)
def describe_audit(self):
    """Show the audit options currently enabled on the instance (SQL Server only)."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DescribeAuditRequest import DescribeAuditRequest
        request = DescribeAuditRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--enabled'], dict(help="""(string) 要开启的审计选项,各个选项之间用英文逗号或空格进行分割,例如:DATABASE_OBJECT_ACCESS_GROUP,ACKUP_RESTORE_GROU等<br>各个数据库版本支持的审计选项可以通过接口[describeAuditOptions](./describeAuditOptions.md)获得,各个审计项的具体含义可以参看微软的官方文档 """, dest='enabled', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 开启SQL Server的数据库审计功能,目前支持实例级的数据库审计。用户可以根据需要开启、关闭审计、自定义审计策略,并下载审计文件。审计文件为原生的SQL Server审计文件,缺省保存6个月。<br>- 仅支持SQL Server ''',
description='''
开启SQL Server的数据库审计功能,目前支持实例级的数据库审计。用户可以根据需要开启、关闭审计、自定义审计策略,并下载审计文件。审计文件为原生的SQL Server审计文件,缺省保存6个月。<br>- 仅支持SQL Server。
示例: jdc rds create-audit --instance-id xxx --enabled xxx
''',
)
def create_audit(self):
    """Enable instance-level database auditing (SQL Server only) and print the response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.CreateAuditRequest import CreateAuditRequest
        request = CreateAuditRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 关闭数据库审计。关闭数据库审计后,以前生成的审计结果文件并不会被立即删除。审计结果文件会过期后由系统自动删除,过期时间缺省为6个月<br>- 仅支持SQL Server ''',
description='''
关闭数据库审计。关闭数据库审计后,以前生成的审计结果文件并不会被立即删除。审计结果文件会过期后由系统自动删除,过期时间缺省为6个月<br>- 仅支持SQL Server。
示例: jdc rds delete-audit --instance-id xxx
''',
)
def delete_audit(self):
    """Turn off database auditing for the instance (SQL Server only)."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DeleteAuditRequest import DeleteAuditRequest
        request = DeleteAuditRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--name'], dict(help="""(string) 审计选项类别,**大小写敏感**,目前支持两种类型:<br>(1)AuditOptions开头:在disalbed参数中返回SQL Server各个版本支持的所有选项,支持的名称为<br>AuditOptions2008R2<br>AuditOptions2012<br>AuditOptions2014<br>AuditOptions2016<br>例如输入参数为"AuditOptions2016",则在disabled字段中返回SQL Server 2016 版本所支持的所有的审计选项<br>(2)AuditDefault开头:京东云建议的默认选项,在enabled参数中返回建议开启的选项,在disabled参数中返回不开启的选项,支持的名称为:<br>AuditDefault2008R2<br>AuditDefault2012<br>AuditDefault2014<br>AuditDefault2016<br>例如输入参数为"AuditDefault2016",则在enabled字段返回SQL Server 2016 版本中京东云建议开启的审计选项,在disabled字段中返回建议不开启的选项 """, dest='name', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取当前系统所支持的各种数据库版本的审计选项及相应的推荐选项<br>- 仅支持SQL Server ''',
description='''
获取当前系统所支持的各种数据库版本的审计选项及相应的推荐选项<br>- 仅支持SQL Server。
示例: jdc rds describe-audit-options --instance-id xxx --name xxx
''',
)
def describe_audit_options(self):
    """List the supported/recommended audit options per SQL Server version."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DescribeAuditOptionsRequest import DescribeAuditOptionsRequest
        request = DescribeAuditOptionsRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--add'], dict(help="""(string) 在原有审计项基础上,增加新的审计项,多个审计项之间用英文逗号,分号或空格分隔,例如DATABASE_OBJECT_ACCESS_GROUP,ACKUP_RESTORE_GROUP """, dest='add', required=False)),
(['--drop'], dict(help="""(string) 删除审计项,多个审计项之间用英文逗号,分号或空格分隔,例如DATABASE_OBJECT_ACCESS_GROUP,ACKUP_RESTORE_GROUP<br>如删除了所有审计项,则审计自动关闭 """, dest='drop', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改当前的审计选项。当前已有审计选项可以通过describeAudit获得,支持的全部选项可以通过getAuditOptions获得。<br>- 仅支持SQL Server ''',
description='''
修改当前的审计选项。当前已有审计选项可以通过describeAudit获得,支持的全部选项可以通过getAuditOptions获得。<br>- 仅支持SQL Server。
示例: jdc rds modify-audit --instance-id xxx
''',
)
def modify_audit(self):
    """Add or drop audit options on the instance (SQL Server only)."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.ModifyAuditRequest import ModifyAuditRequest
        request = ModifyAuditRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取当前实例下的所有审计结果文件的列表<br>- 仅支持SQL Server ''',
description='''
获取当前实例下的所有审计结果文件的列表<br>- 仅支持SQL Server。
示例: jdc rds describe-audit-files --instance-id xxx
''',
)
def describe_audit_files(self):
    """List all audit result files for the instance (SQL Server only)."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DescribeAuditFilesRequest import DescribeAuditFilesRequest
        request = DescribeAuditFilesRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--file-name'], dict(help="""(string) 审计文件名 """, dest='fileName', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取某个审计文件的下载链接,同时支持内链和外链,链接的有效时间为24小时<br>- 仅支持SQL Server ''',
description='''
获取某个审计文件的下载链接,同时支持内链和外链,链接的有效时间为24小时<br>- 仅支持SQL Server。
示例: jdc rds describe-audit-download-url --instance-id xxx --file-name xxx
''',
)
def describe_audit_download_url(self):
    """Fetch a download link for one audit file (SQL Server only) and print it."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DescribeAuditDownloadURLRequest import DescribeAuditDownloadURLRequest
        request = DescribeAuditDownloadURLRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 仅支持MySQL实例开启数据库审计<br>- 仅支持 MySQL 5.6, MySQL 5.7, Percona, MariaDB, PostgreSQL ''',
description='''
仅支持MySQL实例开启数据库审计<br>- 仅支持 MySQL 5.6, MySQL 5.7, Percona, MariaDB, PostgreSQL。
示例: jdc rds enable-audit --instance-id xxx
''',
)
def enable_audit(self):
    """Enable auditing on a MySQL-family/PostgreSQL instance and print the response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.EnableAuditRequest import EnableAuditRequest
        request = EnableAuditRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 仅支持MySQL实例关闭数据库审计<br>- 仅支持 MySQL 5.6, MySQL 5.7, Percona, MariaDB, PostgreSQL ''',
description='''
仅支持MySQL实例关闭数据库审计<br>- 仅支持 MySQL 5.6, MySQL 5.7, Percona, MariaDB, PostgreSQL。
示例: jdc rds disable-audit --instance-id xxx
''',
)
def disable_audit(self):
    """Disable auditing on a MySQL-family/PostgreSQL instance and print the response."""
    # Resolve a configured API client; nothing to do if configuration failed.
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Lazy import: an outdated SDK is reported below instead of crashing at load time.
        from jdcloud_sdk.services.rds.apis.DisableAuditRequest import DisableAuditRequest
        request = DisableAuditRequest(collect_user_args(self.app), collect_user_headers(self.app))
        Printer.print_result(client.send(request))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--start-time'], dict(help="""(string) 查询开始时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间不能早于当前时间30天 """, dest='startTime', required=True)),
        (['--end-time'], dict(help="""(string) 查询截止时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到结束时间不能超过3天 """, dest='endTime', required=True)),
        (['--db-name'], dict(help="""(string) 数据库名 """, dest='dbName', required=False)),
        (['--account-name'], dict(help="""(string) 账号名 """, dest='accountName', required=False)),
        (['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞) """, dest='pageNumber', type=int, required=False)),
        (['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、50 """, dest='pageSize', type=int, required=False)),
        (['--filters'], dict(help="""(array: filter) 过滤参数,多个过滤参数之间的关系为“与”(and); 支持以下属性的过滤:; operation; """, dest='filters', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 仅支持查看MySQL实例的审计内容<br>- 仅支持 MySQL 5.6, MySQL 5.7, Percona, MariaDB, PostgreSQL ''',
    description='''
仅支持查看MySQL实例的审计内容<br>- 仅支持 MySQL 5.6, MySQL 5.7, Percona, MariaDB, PostgreSQL。
示例: jdc rds describe-audit-result --instance-id xxx --start-time xxx --end-time xxx
''',
)
def describe_audit_result(self):
    """Print the audit records of an RDS instance (MySQL-family engines only, per help text).

    Collects CLI args/headers from self.app, sends DescribeAuditResultRequest
    and prints the result as JSON.
    """
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeAuditResultRequest import DescribeAuditResultRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeAuditResultRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--engine'], dict(help="""(string) RDS引擎类型,参见[枚举参数定义](../Enum-Definitions/Enum-Definitions.md) """, dest='engine', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 查看指定地域下各种RDS数据库支持的可用区,不同类型的RDS支持的可用区不一样 ''',
    description='''
查看指定地域下各种RDS数据库支持的可用区,不同类型的RDS支持的可用区不一样。
示例: jdc rds describe-azs --engine xxx
''',
)
def describe_azs(self):
    """Print the availability zones supported by each RDS engine in a region."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeAzsRequest import DescribeAzsRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeAzsRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS实例ID,唯一标识一个实例 """, dest='instanceId', required=True)),
        (['--auto'], dict(help="""(int) 查询备份类型,0为手动备份,1为自动备份,不传表示全部. <br>**- 测试参数,仅支持SQL Server,后续可能被其他参数取代** """, dest='auto', type=int, required=False)),
        (['--backup-type-filter'], dict(help="""(string) 返回backupType等于指定值的备份列表。full为全量备份,diff为增量备份<br>**- 测试参数,仅支持SQL Server,后续可能被其他参数取代** """, dest='backupTypeFilter', required=False)),
        (['--db-name-filter'], dict(help="""(string) 返回dbName等于指定值的备份列表,不传或为空返回全部<br>**- 测试参数,仅支持SQL Server,后续可能被其他参数取代** """, dest='dbNameFilter', required=False)),
        (['--backup-time-range-start-filter'], dict(help="""(string) 返回备份开始时间大于该时间的备份列表<br>**- 测试参数,仅支持SQL Server,后续可能被其他参数取代** """, dest='backupTimeRangeStartFilter', required=False)),
        (['--backup-time-range-end-filter'], dict(help="""(string) 返回备份开始时间小于等于该时间的备份列表<br>**- 测试参数,仅支持SQL Server,后续可能被其他参数取代** """, dest='backupTimeRangeEndFilter', required=False)),
        (['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=True)),
        (['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、30、50、100 """, dest='pageSize', type=int, required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 查看该RDS实例下所有备份的详细信息,返回的备份列表按照备份开始时间(backupStartTime)降序排列。 ''',
    description='''
查看该RDS实例下所有备份的详细信息,返回的备份列表按照备份开始时间(backupStartTime)降序排列。。
示例: jdc rds describe-backups --instance-id xxx --page-number 0 --page-size 0
''',
)
def describe_backups(self):
    """Print all backups of an RDS instance, newest backupStartTime first (per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeBackupsRequest import DescribeBackupsRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeBackupsRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS实例ID,唯一标识一个实例 """, dest='instanceId', required=False)),
        (['--backup-spec'], dict(help="""(backupSpec) 备份规格 """, dest='backupSpec', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 创建一个RDS实例全量备份,可以对整个实例或者部分数据库(仅SQL Server支持)进行全量备份。同一时间点,只能有一个正在运行的备份任务 ''',
    description='''
创建一个RDS实例全量备份,可以对整个实例或者部分数据库(仅SQL Server支持)进行全量备份。同一时间点,只能有一个正在运行的备份任务。
示例: jdc rds create-backup
''',
)
def create_backup(self):
    """Create a full backup of an RDS instance (or selected databases, per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.CreateBackupRequest import CreateBackupRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(CreateBackupRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--backup-id'], dict(help="""(string) 备份ID """, dest='backupId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 删除RDS实例备份,仅允许删除用户生成的备份,系统自动备份不允许删除。 ''',
    description='''
删除RDS实例备份,仅允许删除用户生成的备份,系统自动备份不允许删除。。
示例: jdc rds delete-backup --backup-id xxx
''',
)
def delete_backup(self):
    """Delete a user-created RDS backup (automatic backups cannot be deleted, per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DeleteBackupRequest import DeleteBackupRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DeleteBackupRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--backup-id'], dict(help="""(string) 备份ID """, dest='backupId', required=True)),
        (['--file-name'], dict(help="""(string) 文件名称<br>- MySQL:不支持该参数<br>- SQL Server:必须输入该参数,指定该备份中需要获取下载链接的文件名称。备份中的文件名(不包括后缀)即为备份的数据库名。例如文件名为my_test_db.bak,表示该文件是my_test_db数据库的备份 """, dest='fileName', required=False)),
        (['--url-expiration-second'], dict(help="""(string) 指定下载链接的过期时间,单位秒, 取值范围为 1 ~ 86400 秒;支持 SQL Server:缺省为 86400 秒。支持 MySQL, Percona, MariaDB:缺省为 300 秒。 """, dest='urlExpirationSecond', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取整个备份或备份中单个文件的下载链接。<br>- 当输入参数中有文件名时,获取该文件的下载链接。<br>- 输入参数中无文件名时,获取整个备份的下载链接。<br>由于备份机制的差异,使用该接口下载备份时,SQL Server必须输入文件名,每个文件逐一下载,不支持下载整个备份。SQL Server备份中的文件名(不包括后缀)即为备份的数据库名。例如文件名为my_test_db.bak,表示该文件是my_test_db数据库的备份。<br>MySQL可下载整个备份集,但不支持单个文件的下载。 ''',
    description='''
获取整个备份或备份中单个文件的下载链接。<br>- 当输入参数中有文件名时,获取该文件的下载链接。<br>- 输入参数中无文件名时,获取整个备份的下载链接。<br>由于备份机制的差异,使用该接口下载备份时,SQL Server必须输入文件名,每个文件逐一下载,不支持下载整个备份。SQL Server备份中的文件名(不包括后缀)即为备份的数据库名。例如文件名为my_test_db.bak,表示该文件是my_test_db数据库的备份。<br>MySQL可下载整个备份集,但不支持单个文件的下载。。
示例: jdc rds describe-backup-download-url --backup-id xxx
''',
)
def describe_backup_download_url(self):
    """Print a download URL for a whole backup or for one file inside it (per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeBackupDownloadURLRequest import DescribeBackupDownloadURLRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeBackupDownloadURLRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[1,∞),页码超过总页数时,显示最后一页,用于查询列表的接口 """, dest='pageNumber', type=int, required=False)),
        (['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、30、50、100 """, dest='pageSize', type=int, required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 查询跨地域备份同步服务列表。 ''',
    description='''
查询跨地域备份同步服务列表。。
示例: jdc rds describe-backup-synchronicities
''',
)
def describe_backup_synchronicities(self):
    """List cross-region backup synchronization services."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeBackupSynchronicitiesRequest import DescribeBackupSynchronicitiesRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeBackupSynchronicitiesRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) 源实例ID """, dest='instanceId', required=True)),
        (['--dest-region'], dict(help="""(string) 备份同步的目标地域 """, dest='destRegion', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 创建一个跨地域备份同步服务。 ''',
    description='''
创建一个跨地域备份同步服务。。
示例: jdc rds create-backup-synchronicity --instance-id xxx --dest-region xxx
''',
)
def create_backup_synchronicity(self):
    """Create a cross-region backup synchronization service."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.CreateBackupSynchronicityRequest import CreateBackupSynchronicityRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(CreateBackupSynchronicityRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--service-id'], dict(help="""(string) 跨地域备份同步服务ID """, dest='serviceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 删除一个跨地域备份同步服务。 ''',
    description='''
删除一个跨地域备份同步服务。。
示例: jdc rds delete-backup-synchronicity --service-id xxx
''',
)
def delete_backup_synchronicity(self):
    """Delete a cross-region backup synchronization service."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DeleteBackupSynchronicityRequest import DeleteBackupSynchronicityRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DeleteBackupSynchronicityRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--restore-time'], dict(help="""(string) 根据源实例的哪个时间点创建新实例 """, dest='restoreTime', required=True)),
        (['--service-id'], dict(help="""(string) 跨地域备份同步服务ID """, dest='serviceId', required=True)),
        (['--instance-spec'], dict(help="""(restoredNewDBInstanceSpec) 新建实例规格 """, dest='instanceSpec', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 根据跨地域备份同步服务时间点创建实例。 ''',
    description='''
根据跨地域备份同步服务时间点创建实例。。
示例: jdc rds create-instance-by-time-in-cross-region --restore-time xxx --service-id xxx --instance-spec '{"":""}'
''',
)
def create_instance_by_time_in_cross_region(self):
    """Create an instance from a point in time of a cross-region backup sync service."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.CreateInstanceByTimeInCrossRegionRequest import CreateInstanceByTimeInCrossRegionRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(CreateInstanceByTimeInCrossRegionRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
        (['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、30、50、100 """, dest='pageSize', type=int, required=False)),
        (['--start-time'], dict(help="""(string) 查询开始时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到结束时间不超过三天 """, dest='startTime', required=False)),
        (['--end-time'], dict(help="""(string) 查询结束时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到结束时间不超过三天 """, dest='endTime', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取MySQL实例中binlog的详细信息<br>- 仅支持 MySQL, Percona, MariaDB ''',
    description='''
获取MySQL实例中binlog的详细信息<br>- 仅支持 MySQL, Percona, MariaDB。
示例: jdc rds describe-binlogs --instance-id xxx
''',
)
def describe_binlogs(self):
    """Print binlog details of a MySQL-family RDS instance (per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeBinlogsRequest import DescribeBinlogsRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeBinlogsRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--binlog-backup-id'], dict(help="""(string) binlog的备份ID,可以通过describeBinlogs获得 """, dest='binlogBackupId', required=True)),
        (['--seconds'], dict(help="""(int) 设置链接地址的过期时间,单位是秒,默认值是 300 秒,最长不能超过取值范围为 1 ~ 86400 秒 """, dest='seconds', type=int, required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取MySQL实例的binlog的下载链接<br>- 仅支持 MySQL, Percona, MariaDB ''',
    description='''
获取MySQL实例的binlog的下载链接<br>- 仅支持 MySQL, Percona, MariaDB。
示例: jdc rds describe-binlog-download-url --instance-id xxx --binlog-backup-id xxx
''',
)
def describe_binlog_download_url(self):
    """Print a download URL for a binlog backup of a MySQL-family instance (per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeBinlogDownloadURLRequest import DescribeBinlogDownloadURLRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeBinlogDownloadURLRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 清理本地的binlog并释放空间。 系统只会清理已经备份到存储的binlog,不会影响MySQL实例的备份恢复<br>- 仅支持MySQL ''',
    description='''
清理本地的binlog并释放空间。 系统只会清理已经备份到存储的binlog,不会影响MySQL实例的备份恢复<br>- 仅支持MySQL。
示例: jdc rds clear-binlogs --instance-id xxx
''',
)
def clear_binlogs(self):
    """Purge local binlogs that were already backed up, freeing space (per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.ClearBinlogsRequest import ClearBinlogsRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(ClearBinlogsRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--database'], dict(help="""(string) DDL命令修改的库名 """, dest='database', required=True)),
        (['--table'], dict(help="""(string) DDL命令修改的表名 """, dest='table', required=True)),
        (['--command'], dict(help="""(string) 需要执行的的DDL命令 """, dest='command', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 通过 PT-OSC 服务来处理 DDL 命令, 避免锁表。此接口暂是对部分用户开放 ''',
    description='''
通过 PT-OSC 服务来处理 DDL 命令, 避免锁表。此接口暂是对部分用户开放。
示例: jdc rds alter-table-with-online-ddl --instance-id xxx --database xxx --table xxx --command xxx
''',
)
def alter_table_with_online_ddl(self):
    """Run a DDL command through the PT-OSC service to avoid table locks (per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.AlterTableWithOnlineDDLRequest import AlterTableWithOnlineDDLRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(AlterTableWithOnlineDDLRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--engine'], dict(help="""(string) 设置可见的引擎类型,如 MySQL 等 """, dest='engine', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 查看云数据库 RDS 的权限信息 - 仅支持 MySQL,Percona,MariaDB ''',
    description='''
查看云数据库 RDS 的权限信息 - 仅支持 MySQL,Percona,MariaDB。
示例: jdc rds describe-privilege --engine xxx
''',
)
def describe_privilege(self):
    """Print the RDS privilege information for an engine (MySQL-family only, per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribePrivilegeRequest import DescribePrivilegeRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribePrivilegeRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--db-name'], dict(help="""(string) 数据库名。如果不指定数据库名,则返回所有数据库列表<br>- **MySQL:不支持该字段**<br>- **SQL Server:支持该字段** """, dest='dbName', required=False)),
        (['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
        (['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为100,取值范围:[10,100],用于查询列表的接口 """, dest='pageSize', type=int, required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取当前实例的所有数据库详细信息的列表 ''',
    description='''
获取当前实例的所有数据库详细信息的列表。
示例: jdc rds describe-databases --instance-id xxx
''',
)
def describe_databases(self):
    """List detailed information for all databases of an RDS instance."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DescribeDatabasesRequest import DescribeDatabasesRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DescribeDatabasesRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--db-name'], dict(help="""(string) 数据库名,数据库名称的限制请参考[帮助中心文档](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='dbName', required=True)),
        (['--character-set-name'], dict(help="""(string) 数据库的字符集名,当前支持的字符集请查看[枚举参数定义](../Enum-Definitions/Enum-Definitions.md) """, dest='characterSetName', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 创建一个数据库。 为了实例的管理和数据恢复,RDS对用户权限进行了限制,用户仅能通过控制台或本接口创建数据库 ''',
    description='''
创建一个数据库。 为了实例的管理和数据恢复,RDS对用户权限进行了限制,用户仅能通过控制台或本接口创建数据库。
示例: jdc rds create-database --instance-id xxx --db-name xxx --character-set-name xxx
''',
)
def create_database(self):
    """Create a database on an RDS instance with the given name and character set."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.CreateDatabaseRequest import CreateDatabaseRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(CreateDatabaseRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--db-name'], dict(help="""(string) 库名称 """, dest='dbName', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 从RDS实例中删除数据库。为便于管理和数据恢复,RDS对用户权限进行了控制,用户仅能通过控制台或本接口删除数据库 [MFA enabled] ''',
    description='''
从RDS实例中删除数据库。为便于管理和数据恢复,RDS对用户权限进行了控制,用户仅能通过控制台或本接口删除数据库 [MFA enabled]。
示例: jdc rds delete-database --instance-id xxx --db-name xxx
''',
)
def delete_database(self):
    """Delete a database from an RDS instance (MFA-enabled operation, per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.DeleteDatabaseRequest import DeleteDatabaseRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(DeleteDatabaseRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--db-name'], dict(help="""(string) 库名称 """, dest='dbName', required=True)),
        (['--backup-id'], dict(help="""(string) 备份ID,可从备份查询接口describeBackups获取 """, dest='backupId', required=True)),
        (['--backup-file-name'], dict(help="""(string) 指定该备份中用于恢复数据库的文件名称。通常情况下文件名(不包括后缀)即为备份的数据库名。例如文件名为my_test_db.bak,表示该文件是my_test_db数据库的备份 """, dest='backupFileName', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 从备份中恢复单个数据库,支持从其他实例(但必须是同一个账号下的实例)备份中恢复。例如可以从生产环境的数据库实例的备份恢复到测试环境的数据库中。<br>- 仅支持SQL Server ''',
    description='''
从备份中恢复单个数据库,支持从其他实例(但必须是同一个账号下的实例)备份中恢复。例如可以从生产环境的数据库实例的备份恢复到测试环境的数据库中。<br>- 仅支持SQL Server。
示例: jdc rds restore-database-from-backup --instance-id xxx --db-name xxx --backup-id xxx --backup-file-name xxx
''',
)
def restore_database_from_backup(self):
    """Restore a single database from a backup file (SQL Server only, per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.RestoreDatabaseFromBackupRequest import RestoreDatabaseFromBackupRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(RestoreDatabaseFromBackupRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--db-name'], dict(help="""(string) 库名称 """, dest='dbName', required=True)),
        (['--shared-file-gid'], dict(help="""(string) 共享文件的全局ID,可从上传文件查询接口[describeImportFiles](../Cloud-on-Single-Database/describeImportFiles.md)获取;如果该文件不是共享文件,则不用输入该参数 """, dest='sharedFileGid', required=False)),
        (['--file-name'], dict(help="""(string) 用户上传的备份文件名称(包括文件后缀名),例如mydb1.bak """, dest='fileName', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 从用户通过单库上云工具上传到云上的备份文件中恢复单个数据库<br>- 仅支持SQL Server ''',
    description='''
从用户通过单库上云工具上传到云上的备份文件中恢复单个数据库<br>- 仅支持SQL Server。
示例: jdc rds restore-database-from-file --instance-id xxx --db-name xxx --file-name xxx
''',
)
def restore_database_from_file(self):
    """Restore a single database from an uploaded backup file (SQL Server only, per help text)."""
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        # Client could not be constructed (e.g. missing profile/credentials).
        return
    try:
        from jdcloud_sdk.services.rds.apis.RestoreDatabaseFromFileRequest import RestoreDatabaseFromFileRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        response = rds_client.send(RestoreDatabaseFromFileRequest(args, hdrs))
        Printer.print_result(response)
    except ImportError:
        # The bundled SDK version does not ship this API.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--db-name'], dict(help="""(string) 库名称 """, dest='dbName', required=True)),
        (['--oss-url'], dict(help="""(string) 用户上传到对象存储OSS上的备份文件的路径。<br>例如用户备份上传的bucket为db_backup,文件为test_server/db1.bak,那么ossULR为db_backup/test_server/db1.bak。<br>**授权说明**:需要授予账户ID:785455908940,对这个bucket的读取权限,具体步骤可以查看[文档](https://docs.jdcloud.com/cn/object-storage-service/set-bucket-policy-2)。 """, dest='ossURL', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 从上传到OSS的备份文件中恢复单个数据库<br>- 仅支持SQL Server ''',
    description='''
从上传到OSS的备份文件中恢复单个数据库<br>- 仅支持SQL Server。
示例: jdc rds restore-database-from-oss --instance-id xxx --db-name xxx --oss-url xxx
''',
)
def restore_database_from_oss(self):
    """CLI command: restore one database from a backup file stored in OSS (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.RestoreDatabaseFromOSSRequest import RestoreDatabaseFromOSSRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(RestoreDatabaseFromOSSRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取SQL Server 错误日志及下载信息<br>- 仅支持SQL Server ''',
    description='''
获取SQL Server 错误日志及下载信息<br>- 仅支持SQL Server。
示例: jdc rds describe-error-logs --instance-id xxx
''',
)
def describe_error_logs(self):
    """CLI command: fetch SQL Server error logs and their download info (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DescribeErrorLogsRequest import DescribeErrorLogsRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DescribeErrorLogsRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取用户通过单库上云工具上传到该实例上的文件列表<br>- 仅支持SQL Server ''',
    description='''
获取用户通过单库上云工具上传到该实例上的文件列表<br>- 仅支持SQL Server。
示例: jdc rds describe-import-files --instance-id xxx
''',
)
def describe_import_files(self):
    """CLI command: list files uploaded to this instance via the migration tool (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DescribeImportFilesRequest import DescribeImportFilesRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DescribeImportFilesRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取单库上云工具上传文件的需要的Key。单库上云工具需要正确的key值方能连接到京东云<br>- 仅支持SQL Server ''',
    description='''
获取单库上云工具上传文件的需要的Key。单库上云工具需要正确的key值方能连接到京东云<br>- 仅支持SQL Server。
示例: jdc rds get-upload-key --instance-id xxx
''',
)
def get_upload_key(self):
    """CLI command: obtain the upload key the migration tool needs to connect (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.GetUploadKeyRequest import GetUploadKeyRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(GetUploadKeyRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--file-name'], dict(help="""(string) 单库上云文件名 """, dest='fileName', required=True)),
        (['--shared'], dict(help="""(string) 文件是否共享<br>true:共享<br>false:不共享 """, dest='shared', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 设置或取消上传文件是否共享给同一账号下的其他实例。缺省情况下,文件仅在上传的实例上可见并可导入,其他实例不可见不可导入。如果需要该文件在其他实例上也可导入,可将此文件设置为共享<br>- 仅支持SQL Server ''',
    description='''
设置或取消上传文件是否共享给同一账号下的其他实例。缺省情况下,文件仅在上传的实例上可见并可导入,其他实例不可见不可导入。如果需要该文件在其他实例上也可导入,可将此文件设置为共享<br>- 仅支持SQL Server。
示例: jdc rds set-import-file-shared --instance-id xxx --file-name xxx --shared xxx
''',
)
def set_import_file_shared(self):
    """CLI command: toggle sharing of an uploaded file with other instances in the same account (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.SetImportFileSharedRequest import SetImportFileSharedRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(SetImportFileSharedRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--file-name'], dict(help="""(string) 单库上云文件名 """, dest='fileName', required=True)),
        (['--shared-file-gid'], dict(help="""(string) 共享文件的全局ID,可从上传文件查询接口describeImportFiles中获取;如果该文件不是共享文件,则无须输入该字段 """, dest='sharedFileGid', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 删除用户通过单库上云工具上传的数据库备份文件<br>- 仅支持SQL Server ''',
    description='''
删除用户通过单库上云工具上传的数据库备份文件<br>- 仅支持SQL Server。
示例: jdc rds delete-import-file --instance-id xxx --file-name xxx
''',
)
def delete_import_file(self):
    """CLI command: delete a backup file uploaded via the migration tool (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DeleteImportFileRequest import DeleteImportFileRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DeleteImportFileRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
        (['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:[10,100],且为10的整数倍 """, dest='pageSize', type=int, required=False)),
        (['--filters'], dict(help="""(array: filter) 过滤参数,多个过滤参数之间的关系为“与”(and); 支持以下属性的过滤:; instanceId, 支持operator选项:eq; instanceName, 支持operator选项:eq, like; engine, 支持operator选项:eq; engineVersion, 支持operator选项:eq; instanceStatus, 支持operator选项:eq; vpcId, 支持operator选项:eq; instanceType, 支持operator选项:eq; internalDomainName, 支持operator选项:eq; publicDomainName, 支持operator选项:eq; """, dest='filters', required=False)),
        (['--tag-filters'], dict(help="""(array: tagFilter) 资源标签 """, dest='tagFilters', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 获取当前账号下所有RDS实例及MySQL/PostgreSQL只读实例的概要信息,例如实例类型,版本,计费信息等 ''',
    description='''
获取当前账号下所有RDS实例及MySQL/PostgreSQL只读实例的概要信息,例如实例类型,版本,计费信息等。
示例: jdc rds describe-instances
''',
)
def describe_instances(self):
    """CLI command: list summary info for all RDS instances and MySQL/PostgreSQL read-only instances."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DescribeInstancesRequest import DescribeInstancesRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DescribeInstancesRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-spec'], dict(help="""(dBInstanceSpec) 新建实例规格 """, dest='instanceSpec', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 创建一个RDS实例,用户可以使用相应的数据库客户端或者应用程序通过域名和端口链接到该RDS实例上,进行操作。 ''',
    description='''
创建一个RDS实例,用户可以使用相应的数据库客户端或者应用程序通过域名和端口链接到该RDS实例上,进行操作。。
示例: jdc rds create-instance --instance-spec '{"":""}'
''',
)
def create_instance(self):
    """CLI command: create a new RDS instance from the given instance spec."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.CreateInstanceRequest import CreateInstanceRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(CreateInstanceRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 查询RDS实例(MySQL、SQL Server等)的详细信息以及MySQL/PostgreSQL只读实例详细信息 ''',
    description='''
查询RDS实例(MySQL、SQL Server等)的详细信息以及MySQL/PostgreSQL只读实例详细信息。
示例: jdc rds describe-instance-attributes --instance-id xxx
''',
)
def describe_instance_attributes(self):
    """CLI command: show detailed attributes of an RDS instance or read-only instance."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DescribeInstanceAttributesRequest import DescribeInstanceAttributesRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DescribeInstanceAttributesRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 删除一个RDS实例或者MySQL/PostgreSQL的只读实例。删除MySQL/PostgreSQL主实例时,会同时将对应的MySQL/PostgreSQL只读实例也删除 [MFA enabled] ''',
    description='''
删除一个RDS实例或者MySQL/PostgreSQL的只读实例。删除MySQL/PostgreSQL主实例时,会同时将对应的MySQL/PostgreSQL只读实例也删除 [MFA enabled]。
示例: jdc rds delete-instance --instance-id xxx
''',
)
def delete_instance(self):
    """CLI command: delete an RDS instance (also removes attached read-only instances, per API docs)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DeleteInstanceRequest import DeleteInstanceRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DeleteInstanceRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 查看RDS实例备份策略。根据数据库类型的不同,支持的备份策略也略有差异,具体请看返回参数中的详细说明 ''',
    description='''
查看RDS实例备份策略。根据数据库类型的不同,支持的备份策略也略有差异,具体请看返回参数中的详细说明。
示例: jdc rds describe-backup-policy --instance-id xxx
''',
)
def describe_backup_policy(self):
    """CLI command: show the backup policy of an RDS instance."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DescribeBackupPolicyRequest import DescribeBackupPolicyRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DescribeBackupPolicyRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--start-window'], dict(help="""(string) 自动备份开始时间窗口,例如:00:00-01:00,表示0点到1点开始进行数据库自动备份,备份完成时间则跟实例大小有关,不一定在这个时间范围中<br>SQL Server:范围00:00-23:59,时间范围差不得小于30分钟。<br>MySQL,只能是以下取值:<br>00:00-01:00<br>01:00-02:00<br>......<br>23:00-24:00 """, dest='startWindow', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 修改RDS实例备份策略,目前仅支持用户修改“自动备份开始时间窗口”这个参数,其他参数暂不开放修改 ''',
    description='''
修改RDS实例备份策略,目前仅支持用户修改“自动备份开始时间窗口”这个参数,其他参数暂不开放修改。
示例: jdc rds modify-backup-policy --instance-id xxx
''',
)
def modify_backup_policy(self):
    """CLI command: modify the backup policy (auto-backup start window) of an RDS instance."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.ModifyBackupPolicyRequest import ModifyBackupPolicyRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(ModifyBackupPolicyRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--instance-name'], dict(help="""(string) 实例名称,名称支持中文,实例名的具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='instanceName', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 修改实例名称,可支持中文,实例名的具体规则可参见帮助中心文档:[名称及密码限制](../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) ''',
    description='''
修改实例名称,可支持中文,实例名的具体规则可参见帮助中心文档:[名称及密码限制](../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md)。
示例: jdc rds modify-instance-name --instance-id xxx --instance-name xxx
''',
)
def modify_instance_name(self):
    """CLI command: rename an RDS instance."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.ModifyInstanceNameRequest import ModifyInstanceNameRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(ModifyInstanceNameRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 对RDS实例进行主备切换。<br>注意:如果实例正在进行备份,那么主备切换将会终止备份操作。可以查看备份策略中的备份开始时间确认是否有备份正在运行。如果确实需要在实例备份时进行主备切换,建议切换完成 后,手工进行一次实例的全备<br>对于SQL Server,主备切换后30分钟内,不支持按时间点恢复/创建,例如在10:05分用户进行了主备切换,那么10:05 ~ 10:35这个时间段不能进行按时间点恢复/创建。<br>- 仅支持SQL Server ''',
    description='''
对RDS实例进行主备切换。<br>注意:如果实例正在进行备份,那么主备切换将会终止备份操作。可以查看备份策略中的备份开始时间确认是否有备份正在运行。如果确实需要在实例备份时进行主备切换,建议切换完成 后,手工进行一次实例的全备<br>对于SQL Server,主备切换后30分钟内,不支持按时间点恢复/创建,例如在10:05分用户进行了主备切换,那么10:05 ~ 10:35这个时间段不能进行按时间点恢复/创建。<br>- 仅支持SQL Server。
示例: jdc rds failover-instance --instance-id xxx
''',
)
def failover_instance(self):
    """CLI command: trigger a primary/standby failover for an RDS instance (SQL Server only)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.FailoverInstanceRequest import FailoverInstanceRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(FailoverInstanceRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--reboot-master'], dict(help="""(bool) 是否重启主节点。<br> - 仅SQL Server 支持该参数 """, dest='rebootMaster', required=False)),
        (['--reboot-slave'], dict(help="""(bool) 是否重启备节点。<br> - 仅SQL Server 支持该参数 """, dest='rebootSlave', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 重启RDS实例,例如修改了一些配置参数后,需要重启实例才能生效。可以结合主备切换的功能,轮流重启备机,降低对业务的影响<br>**注意:如果实例正在进行备份,那么重启主实例将会终止备份操作。** 可以查看备份策略中的备份开始时间确认是否有备份正在运行。如果确实需要在实例备份时重启主实例,建议重启后,手工进行一次实例的全备。 ''',
    description='''
重启RDS实例,例如修改了一些配置参数后,需要重启实例才能生效。可以结合主备切换的功能,轮流重启备机,降低对业务的影响<br>**注意:如果实例正在进行备份,那么重启主实例将会终止备份操作。** 可以查看备份策略中的备份开始时间确认是否有备份正在运行。如果确实需要在实例备份时重启主实例,建议重启后,手工进行一次实例的全备。。
示例: jdc rds reboot-instance --instance-id xxx
''',
)
def reboot_instance(self):
    """CLI command: reboot an RDS instance (optionally master/slave nodes on SQL Server)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.RebootInstanceRequest import RebootInstanceRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(RebootInstanceRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 开启RDS实例的外网访问功能。开启后,用户可以通过internet访问RDS实例 ''',
    description='''
开启RDS实例的外网访问功能。开启后,用户可以通过internet访问RDS实例。
示例: jdc rds enable-internet-access --instance-id xxx
''',
)
def enable_internet_access(self):
    """CLI command: enable public internet access for an RDS instance."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.EnableInternetAccessRequest import EnableInternetAccessRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(EnableInternetAccessRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 关闭RDS实例的外网访问功能。关闭后,用户无法通过Internet访问RDS,但可以在京东云内网通过内网域名访问 ''',
    description='''
关闭RDS实例的外网访问功能。关闭后,用户无法通过Internet访问RDS,但可以在京东云内网通过内网域名访问。
示例: jdc rds disable-internet-access --instance-id xxx
''',
)
def disable_internet_access(self):
    """CLI command: disable public internet access for an RDS instance."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.DisableInternetAccessRequest import DisableInternetAccessRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(DisableInternetAccessRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--backup-id'], dict(help="""(string) 用于恢复的备份Id,仅限于本实例生成的备份 """, dest='backupId', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 使用实例的全量备份覆盖恢复当前实例 ''',
    description='''
使用实例的全量备份覆盖恢复当前实例。
示例: jdc rds restore-instance --instance-id xxx
''',
)
def restore_instance(self):
    """CLI command: restore the current instance in place from one of its full backups."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.RestoreInstanceRequest import RestoreInstanceRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(RestoreInstanceRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--backup-id'], dict(help="""(string) 备份ID """, dest='backupId', required=True)),
        (['--engine'], dict(help="""(string) 标识是创建什么类型的实例,例如MySQL,SQL Server等,具体可参见文档[枚举参数定义](../Enum-Definitions/Enum-Definitions.md)<br>**注意:备份来源实例的engine和要创建的实例的engine必须一致** """, dest='engine', required=True)),
        (['--instance-spec'], dict(help="""(restoredNewDBInstanceSpec) 新建实例规格 """, dest='instanceSpec', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 根据源实例全量备份创建一个新实例,新实例的数据跟源实例在创建备份时的数据状态一样。<br>例如根据源实例A的一个全量备份“mybak”新建一个实例B,该备份是在“‘2018-8-18 03:23:54”创建的。那么新建实例B的数据状态跟实例A‘2018-8-18 03:23:54’的状态一致 ''',
    description='''
根据源实例全量备份创建一个新实例,新实例的数据跟源实例在创建备份时的数据状态一样。<br>例如根据源实例A的一个全量备份“mybak”新建一个实例B,该备份是在“‘2018-8-18 03:23:54”创建的。那么新建实例B的数据状态跟实例A‘2018-8-18 03:23:54’的状态一致。
示例: jdc rds create-instance-from-backup --backup-id xxx --engine xxx --instance-spec '{"":""}'
''',
)
def create_instance_from_backup(self):
    """CLI command: create a new instance from a source instance's full backup."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.CreateInstanceFromBackupRequest import CreateInstanceFromBackupRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(CreateInstanceFromBackupRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--new-instance-class'], dict(help="""(string) 扩容后实例规格 """, dest='newInstanceClass', required=True)),
        (['--new-instance-storage-gb'], dict(help="""(int) 扩容后实例磁盘大小 """, dest='newInstanceStorageGB', type=int, required=True)),
        (['--new-instance-storage-type'], dict(help="""(string) 存储类型,如果不指定,默认会采用实例原存储类型. """, dest='newInstanceStorageType', required=False)),
        (['--storage-encrypted'], dict(help="""(bool) 实例数据加密(存储类型为云硬盘才支持数据加密). false:不加密; true:加密. 如果实例从本地盘变为云硬盘,缺省为false. 如果实例本来就是使用云硬盘的,缺省和源实例保持一致 """, dest='storageEncrypted', required=False)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 实例扩容,支持升级实例的CPU,内存及磁盘。 ''',
    description='''
实例扩容,支持升级实例的CPU,内存及磁盘。。
示例: jdc rds modify-instance-spec --instance-id xxx --new-instance-class xxx --new-instance-storage-gb 0
''',
)
def modify_instance_spec(self):
    """CLI command: resize an RDS instance (CPU/memory class and storage)."""
    client = ClientFactory('rds').get(self.app)
    if client is None:
        return
    try:
        # Imported lazily so an older SDK lacking this API degrades gracefully.
        from jdcloud_sdk.services.rds.apis.ModifyInstanceSpecRequest import ModifyInstanceSpecRequest
        params = collect_user_args(self.app)
        custom_headers = collect_user_headers(self.app)
        Printer.print_result(client.send(ModifyInstanceSpecRequest(params, custom_headers)))
    except ImportError:
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as exc:
        print(exc)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--restore-time'], dict(help="""(string) 根据源实例的哪个时间点创建新实例 """, dest='restoreTime', required=True)),
(['--instance-spec'], dict(help="""(restoredNewDBInstanceSpec) 新建实例规格 """, dest='instanceSpec', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 根据源实例备份创建一个新实例,并通过追加日志的方式,将新实例的数据恢复到跟源实例指定时间点的数据状态一样。<br>例如根据实例A在“2018-06-18 23:00:00”时间点创建一个实例B,就是新建一个实例B,该实例B的数据跟实例A在“2018-06-18 23:00:00”这个时间点的数据完全一致。<br>对于SQL Server,主备切换后30分钟内,不支持按时间点恢复/创建,例如在10:05分用户进行了主备切换,那么10:05 ~ 10:35这个时间段不能进行按时间点恢复/创建。 ''',
description='''
根据源实例备份创建一个新实例,并通过追加日志的方式,将新实例的数据恢复到跟源实例指定时间点的数据状态一样。<br>例如根据实例A在“2018-06-18 23:00:00”时间点创建一个实例B,就是新建一个实例B,该实例B的数据跟实例A在“2018-06-18 23:00:00”这个时间点的数据完全一致。<br>对于SQL Server,主备切换后30分钟内,不支持按时间点恢复/创建,例如在10:05分用户进行了主备切换,那么10:05 ~ 10:35这个时间段不能进行按时间点恢复/创建。。
示例: jdc rds create-instance-by-time --instance-id xxx --restore-time xxx --instance-spec '{"":""}'
''',
)
def create_instance_by_time(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.CreateInstanceByTimeRequest import CreateInstanceByTimeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = CreateInstanceByTimeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--instance-name'], dict(help="""(string) 实例名称,具体规则可参见帮助中心文档:[名称及密码限制](../../../documentation/Database-and-Cache-Service/RDS/Introduction/Restrictions/SQLServer-Restrictions.md) """, dest='instanceName', required=True)),
(['--instance-class'], dict(help="""(string) 只读实例规格FlavorId """, dest='instanceClass', required=True)),
(['--instance-storage-type'], dict(help="""(string) 存储类型,参见[枚举参数定义](../Enum-Definitions/Enum-Definitions.md),缺省值为:LOCAL_SSD """, dest='instanceStorageType', required=False)),
(['--instance-storage-gb'], dict(help="""(int) 磁盘空间 """, dest='instanceStorageGB', type=int, required=True)),
(['--az-id'], dict(help="""(string) 可用区ID """, dest='azId', required=True)),
(['--vpc-id'], dict(help="""(string) VPC的ID,如果没有填写就保持和常规实例一样的VPC """, dest='vpcId', required=False)),
(['--subnet-id'], dict(help="""(string) 子网ID,如果没有填写就保持和常规实例一样的subnet """, dest='subnetId', required=False)),
(['--parameter-group'], dict(help="""(string) 参数组ID,缺省采用和常规实例一样的参数组 """, dest='parameterGroup', required=False)),
(['--storage-encrypted'], dict(help="""(bool) 实例数据加密(存储类型为云硬盘才支持数据加密)。false:不加密;true:加密。缺省为false。 """, dest='storageEncrypted', required=False)),
(['--count'], dict(help="""(int) 创建只读实例的数目,缺省为1 """, dest='count', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 创建MySQL的只读实例<br> - 仅支持MySQL<br> - 创建的只读实例跟主实例在同一个VPC同一个子网中<br> * 只读实例只支持按配置计费 ''',
description='''
创建MySQL的只读实例<br> - 仅支持MySQL<br> - 创建的只读实例跟主实例在同一个VPC同一个子网中<br> * 只读实例只支持按配置计费。
示例: jdc rds create-roinstance --instance-id xxx --instance-name xxx --instance-class xxx --instance-storage-gb 0 --az-id xxx
''',
)
def create_roinstance(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.CreateROInstanceRequest import CreateROInstanceRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = CreateROInstanceRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--connection-mode'], dict(help="""(string) 连接模式<br> - standard:标准模式(缺省),响应时间短,但没有 SQL 审计和拦截的能力 <br>- security:高安全模式,具备一定的 SQL注入拦截能力,并可开启 SQL 审计,但会增加一定的响应时间 """, dest='connectionMode', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改MySQL实例的连接模式:标准模式(standard) 和高安全模式(security).<br>- **标准模式**:响应时间短,但没有 SQL 审计和拦截的能力。<br>- **高安全模式**:具备一定的 SQL注入拦截能力(通过分析表达式、关键系统函数等来实现防御 SQL 注入攻击),并可开启 SQL 审计,但会增加一定的响应时间。<br>- 仅支持MySQL ''',
description='''
修改MySQL实例的连接模式:标准模式(standard) 和高安全模式(security).<br>- **标准模式**:响应时间短,但没有 SQL 审计和拦截的能力。<br>- **高安全模式**:具备一定的 SQL注入拦截能力(通过分析表达式、关键系统函数等来实现防御 SQL 注入攻击),并可开启 SQL 审计,但会增加一定的响应时间。<br>- 仅支持MySQL。
示例: jdc rds modify-connection-mode --instance-id xxx --connection-mode xxx
''',
)
def modify_connection_mode(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ModifyConnectionModeRequest import ModifyConnectionModeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ModifyConnectionModeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取SQL Server实例按时间点恢复/创建时,可恢复到的最后的一个时间点<br>- 仅支持SQL Server ''',
description='''
获取SQL Server实例按时间点恢复/创建时,可恢复到的最后的一个时间点<br>- 仅支持SQL Server。
示例: jdc rds describe-latest-restore-time --instance-id xxx
''',
)
def describe_latest_restore_time(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeLatestRestoreTimeRequest import DescribeLatestRestoreTimeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeLatestRestoreTimeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--parameter-group-id'], dict(help="""(string) 参数组ID """, dest='parameterGroupId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改RDS实例的参数组<br>- 仅支持MySQL ''',
description='''
修改RDS实例的参数组<br>- 仅支持MySQL。
示例: jdc rds modify-parameter-group --instance-id xxx --parameter-group-id xxx
''',
)
def modify_parameter_group(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ModifyParameterGroupRequest import ModifyParameterGroupRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ModifyParameterGroupRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--target-instance-id'], dict(help="""(string) 要交换的实例ID """, dest='targetInstanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 交换两个实例的域名,包括内网域名和外网域名。如果一个实例有外网域名,一个没有,则不允许交换。<br>- 仅支持SQL Server ''',
description='''
交换两个实例的域名,包括内网域名和外网域名。如果一个实例有外网域名,一个没有,则不允许交换。<br>- 仅支持SQL Server。
示例: jdc rds exchange-instance-dns --instance-id xxx --target-instance-id xxx
''',
)
def exchange_instance_dns(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ExchangeInstanceDnsRequest import ExchangeInstanceDnsRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ExchangeInstanceDnsRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--new-az-id'], dict(help="""(array) 新可用区ID。 如果是单机实例,只需输入一个可用区;如果是主备实例,则必须输入两个可用区ID:第一个为主节点所在可用区,第二个为备节点所在可用区。主备两个可用区可以相同,也可以不同 """, dest='newAzId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改实例的可用区,例如将实例的可用区从单可用区调整为多可用区 ''',
description='''
修改实例的可用区,例如将实例的可用区从单可用区调整为多可用区。
示例: jdc rds modify-instance-az --instance-id xxx --new-az-id '{"":""}'
''',
)
def modify_instance_az(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ModifyInstanceAzRequest import ModifyInstanceAzRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ModifyInstanceAzRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看当前实例已开启加密连接。 ''',
description='''
查看当前实例已开启加密连接。。
示例: jdc rds describe-ssl --instance-id xxx
''',
)
def describe_ssl(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeSSLRequest import DescribeSSLRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeSSLRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 开启数据库的加密连接, 同时会重启数据库实例 ''',
description='''
开启数据库的加密连接, 同时会重启数据库实例。
示例: jdc rds enable-ssl --instance-id xxx
''',
)
def enable_ssl(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.EnableSSLRequest import EnableSSLRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = EnableSSLRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--restore-time'], dict(help="""(string) 根据源实例的哪个时间点创建新实例 """, dest='restoreTime', required=True)),
(['--restore-schema'], dict(help="""(array: array) 需要进行单库,单表恢复的概要信息 """, dest='restoreSchema', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 根据时间点,选择单表恢复当前实例<br>- 仅支持MySQL ''',
description='''
根据时间点,选择单表恢复当前实例<br>- 仅支持MySQL。
示例: jdc rds restore-instance-by-time --instance-id xxx --restore-time xxx --restore-schema ['{"":""}']
''',
)
def restore_instance_by_time(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.RestoreInstanceByTimeRequest import RestoreInstanceByTimeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = RestoreInstanceByTimeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) Instance ID """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 开启数据库的高安全模式<br>- 仅支持MySQL ''',
description='''
开启数据库的高安全模式<br>- 仅支持MySQL。
示例: jdc rds enable-intercept --instance-id xxx
''',
)
def enable_intercept(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.EnableInterceptRequest import EnableInterceptRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = EnableInterceptRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) Instance ID """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 关闭数据库的高安全模式<br>- 仅支持MySQL ''',
description='''
关闭数据库的高安全模式<br>- 仅支持MySQL。
示例: jdc rds disable-intercept --instance-id xxx
''',
)
def disable_intercept(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DisableInterceptRequest import DisableInterceptRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DisableInterceptRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) Instance ID """, dest='instanceId', required=True)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:[10,100],且为10的整数倍 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看开启高安全模式后,当前实例的 SQL 拦截记录<br>- 仅支持MySQL ''',
description='''
查看开启高安全模式后,当前实例的 SQL 拦截记录<br>- 仅支持MySQL。
示例: jdc rds describe-intercept-result --instance-id xxx
''',
)
def describe_intercept_result(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeInterceptResultRequest import DescribeInterceptResultRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeInterceptResultRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) Instance ID """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看当前实例已开启的安全模式。如果开启数据库的高安全模式,会返回配置信息<br>- 仅支持MySQL ''',
description='''
查看当前实例已开启的安全模式。如果开启数据库的高安全模式,会返回配置信息<br>- 仅支持MySQL。
示例: jdc rds describe-intercept --instance-id xxx
''',
)
def describe_intercept(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeInterceptRequest import DescribeInterceptRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeInterceptRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为100,取值范围:[10,100],用于查询列表的接口 """, dest='pageSize', type=int, required=False)),
(['--filters'], dict(help="""(array: filter) 过滤参数,多个过滤参数之间的关系为“与”(and); 支持以下属性的过滤:logType, 支持operator选项:eq, 仅支持 MySQL,Percona,MariaDB; """, dest='filters', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取日志文件列表<br>- 仅支持PostgreSQL, MySQL, Percona, MariaDB ''',
description='''
获取日志文件列表<br>- 仅支持PostgreSQL, MySQL, Percona, MariaDB。
示例: jdc rds describe-logs --instance-id xxx
''',
)
def describe_logs(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeLogsRequest import DescribeLogsRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeLogsRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--log-id'], dict(help="""(string) 日志文件ID """, dest='logId', required=True)),
(['--seconds'], dict(help="""(int) 设置链接地址的过期时间,单位是秒,最长不能超过取值范围为 1 ~ 86400 秒 """, dest='seconds', type=int, required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 设置日志文件的下载链接过期时间,重新生成 PostgreSQL 的日志文件下载地址 ''',
description='''
设置日志文件的下载链接过期时间,重新生成 PostgreSQL 的日志文件下载地址。
示例: jdc rds update-log-download-urlinternal --instance-id xxx --log-id xxx --seconds 0
''',
)
def update_log_download_urlinternal(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.UpdateLogDownloadURLInternalRequest import UpdateLogDownloadURLInternalRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = UpdateLogDownloadURLInternalRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--log-id'], dict(help="""(string) 日志文件ID """, dest='logId', required=True)),
(['--seconds'], dict(help="""(int) 设置链接地址的过期时间,单位是秒,默认值是 300 秒,最长不能超过取值范围为 1 ~ 86400 秒 """, dest='seconds', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 根据日志文件的下载链接过期时间,生成日志文件下载地址 仅支持 PostgreSQL, MySQL, Percona, MariaDB ''',
description='''
根据日志文件的下载链接过期时间,生成日志文件下载地址 仅支持 PostgreSQL, MySQL, Percona, MariaDB。
示例: jdc rds describe-log-download-url --instance-id xxx --log-id xxx
''',
)
def describe_log_download_url(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeLogDownloadURLRequest import DescribeLogDownloadURLRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeLogDownloadURLRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) Instance ID """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看SQL Server实例的配置参数<br>- 仅支持SQL Server ''',
description='''
查看SQL Server实例的配置参数<br>- 仅支持SQL Server。
示例: jdc rds describe-parameters --instance-id xxx
''',
)
def describe_parameters(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeParametersRequest import DescribeParametersRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeParametersRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) Instance ID """, dest='instanceId', required=True)),
(['--parameters'], dict(help="""(array: parameter) 修改的实例参数 """, dest='parameters', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改SQL Server实例的配置参数,目前支持以下参数:max_worker_threads,max_degree_of_parallelism,max_server_memory_(MB)。 部分参数修改后,需要重启才能生效,具体可以参考微软的相关文档。<br>- 仅支持SQL Server ''',
description='''
修改SQL Server实例的配置参数,目前支持以下参数:max_worker_threads,max_degree_of_parallelism,max_server_memory_(MB)。 部分参数修改后,需要重启才能生效,具体可以参考微软的相关文档。<br>- 仅支持SQL Server。
示例: jdc rds modify-parameters --instance-id xxx --parameters ['{"":""}']
''',
)
def modify_parameters(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ModifyParametersRequest import ModifyParametersRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ModifyParametersRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) Parameter Group ID """, dest='parameterGroupId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 删除参数组<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
删除参数组<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds delete-parameter-group --parameter-group-id xxx
''',
)
def delete_parameter_group(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DeleteParameterGroupRequest import DeleteParameterGroupRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DeleteParameterGroupRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) Parameter Group ID """, dest='parameterGroupId', required=True)),
(['--parameter-group-name'], dict(help="""(string) 参数组名称 """, dest='parameterGroupName', required=True)),
(['--description'], dict(help="""(string) 参数组描述 """, dest='description', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改参数组名称,描述<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
修改参数组名称,描述<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds modify-parameter-group-attribute --parameter-group-id xxx --parameter-group-name xxx
''',
)
def modify_parameter_group_attribute(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ModifyParameterGroupAttributeRequest import ModifyParameterGroupAttributeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ModifyParameterGroupAttributeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:[10,100],且为10的整数倍 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取当前账号下所有的参数组列表<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
获取当前账号下所有的参数组列表<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds describe-parameter-groups
''',
)
def describe_parameter_groups(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeParameterGroupsRequest import DescribeParameterGroupsRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeParameterGroupsRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--engine'], dict(help="""(string) 实例引擎类型,参见[枚举参数定义](../Enum-Definitions/Enum-Definitions.md) """, dest='engine', required=True)),
(['--engine-version'], dict(help="""(string) 实例引擎版本,参见[枚举参数定义](../Enum-Definitions/Enum-Definitions.md) """, dest='engineVersion', required=True)),
(['--parameter-group-name'], dict(help="""(string) 参数组的名字 """, dest='parameterGroupName', required=True)),
(['--description'], dict(help="""(string) 参数组的描述 """, dest='description', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 创建一个参数组<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
创建一个参数组<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds create-parameter-group --engine xxx --engine-version xxx --parameter-group-name xxx
''',
)
def create_parameter_group(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.CreateParameterGroupRequest import CreateParameterGroupRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = CreateParameterGroupRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) Parameter Group ID """, dest='parameterGroupId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看参数组的参数<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
查看参数组的参数<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds describe-parameter-group-parameters --parameter-group-id xxx
''',
)
def describe_parameter_group_parameters(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeParameterGroupParametersRequest import DescribeParameterGroupParametersRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeParameterGroupParametersRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) Parameter Group ID """, dest='parameterGroupId', required=True)),
(['--parameters'], dict(help="""(array: array) 修改的参数 """, dest='parameters', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 修改参数组的参数<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
修改参数组的参数<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds modify-parameter-group-parameters --parameter-group-id xxx --parameters ['{"":""}']
''',
)
def modify_parameter_group_parameters(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.ModifyParameterGroupParametersRequest import ModifyParameterGroupParametersRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = ModifyParameterGroupParametersRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) Parameter Group ID """, dest='parameterGroupId', required=True)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:[10,100],且为10的整数倍 """, dest='pageSize', type=int, required=False)),
(['--start-time'], dict(help="""(string) 查询开始时间,格式为:YYYY-MM-DD HH:mm:ss """, dest='startTime', required=False)),
(['--end-time'], dict(help="""(string) 查询结束时间,格式为:YYYY-MM-DD HH:mm:ss """, dest='endTime', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看参数的修改历史<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
查看参数的修改历史<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds describe-parameter-modify-records --parameter-group-id xxx
''',
)
def describe_parameter_modify_records(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeParameterModifyRecordsRequest import DescribeParameterModifyRecordsRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeParameterModifyRecordsRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) 参数组ID """, dest='parameterGroupId', required=True)),
(['--parameter-group-name'], dict(help="""(string) 参数组的名字 """, dest='parameterGroupName', required=True)),
(['--description'], dict(help="""(string) 参数组的描述 """, dest='description', required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 拷贝参数组 ''',
description='''
拷贝参数组。
示例: jdc rds copy-parameter-group --parameter-group-id xxx --parameter-group-name xxx
''',
)
def copy_parameter_group(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.CopyParameterGroupRequest import CopyParameterGroupRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = CopyParameterGroupRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) Region ID """, dest='regionId', required=False)),
(['--parameter-group-id'], dict(help="""(string) Parameter Group ID """, dest='parameterGroupId', required=True)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:[10,100],且为10的整数倍 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看参数组绑定的云数据库实例<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL ''',
description='''
查看参数组绑定的云数据库实例<br>- 仅支持MySQL,Percona,MariaDB,PostgreSQL。
示例: jdc rds describe-parameter-group-attached-instances --parameter-group-id xxx
''',
)
def describe_parameter_group_attached_instances(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeParameterGroupAttachedInstancesRequest import DescribeParameterGroupAttachedInstancesRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeParameterGroupAttachedInstancesRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--query-type'], dict(help="""(string) 查询类型,不同的查询类型按照相应的字段从高到低返回结果。<br>支持如下类型:<br>Missing:缺失索引<br>Size:索引大小,单位KB<br>Updates:索引更新次数<br>Scans:表扫描次数<br>Used:最少使用<br> """, dest='queryType', required=True)),
(['--db'], dict(help="""(string) 需要查询的数据库名,多个数据库名之间用英文逗号分隔,默认所有数据库 """, dest='db', required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,1000)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为50,取值范围:[1,100],只能为10的倍数,用于查询列表的接口 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 根据用户定义的查询条件,获取索引性能的统计信息,并提供缺失索引及索引创建建议。用户可以根据这些信息查找与索引相关的性能瓶颈,并进行优化。<br>- 仅支持SQL Server ''',
description='''
根据用户定义的查询条件,获取索引性能的统计信息,并提供缺失索引及索引创建建议。用户可以根据这些信息查找与索引相关的性能瓶颈,并进行优化。<br>- 仅支持SQL Server。
示例: jdc rds describe-index-performance --instance-id xxx --query-type xxx
''',
)
def describe_index_performance(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeIndexPerformanceRequest import DescribeIndexPerformanceRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeIndexPerformanceRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--query-type'], dict(help="""(string) 查询类型,不同的查询类型按照相应的字段从高到低返回结果。<br>支持如下类型:<br>ExecutionCount:执行次数<br>LastRows:上次返回行数<br>ElapsedTime:平均执行时间<br>CPUTime:平均CPU时间<br>LogicalReads:平均逻辑读<br>LogicalWrites:平均逻辑写<br>PhysicalReads:平均物理读<br> """, dest='queryType', required=True)),
(['--threshold'], dict(help="""(int) 只返回查询条件大于等于threshold的记录,默认为0 """, dest='threshold', type=int, required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,1000]。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为50,取值范围:[1,100],只能为10的倍数,用于查询列表的接口 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 根据用户定义的查询条件,获取SQL执行的性能统计信息,例如慢SQL等。用户可以根据这些信息查找与SQL执行相关的性能瓶颈,并进行优化。<br>- 仅支持SQL Server ''',
description='''
根据用户定义的查询条件,获取SQL执行的性能统计信息,例如慢SQL等。用户可以根据这些信息查找与SQL执行相关的性能瓶颈,并进行优化。<br>- 仅支持SQL Server。
示例: jdc rds describe-query-performance --instance-id xxx --query-type xxx
''',
)
def describe_query_performance(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeQueryPerformanceRequest import DescribeQueryPerformanceRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeQueryPerformanceRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--start-time'], dict(help="""(string) 慢日志开始时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到当前时间不能大于 7 天, 开始时间不能大于结束时间,结束时间不能大于当前时间 """, dest='startTime', required=True)),
(['--end-time'], dict(help="""(string) 慢日志结束时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到当前时间不能大于 7 天, 开始时间不能大于结束时间,结束时间不能大于当前时间 """, dest='endTime', required=True)),
(['--db-name'], dict(help="""(string) 查询哪个数据库的慢日志,不填表示返回所有数据库的慢日志 """, dest='dbName', required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,1000)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、30、50、100 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查询MySQL实例的慢日志的详细信息。<br>- 仅支持MySQL ''',
description='''
查询MySQL实例的慢日志的详细信息。<br>- 仅支持MySQL。
示例: jdc rds describe-slow-log-attributes --instance-id xxx --start-time xxx --end-time xxx
''',
)
def describe_slow_log_attributes(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeSlowLogAttributesRequest import DescribeSlowLogAttributesRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeSlowLogAttributesRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--start-time'], dict(help="""(string) 慢日志开始时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到当前时间不能大于 7 天,开始时间不能大于结束时间,结束时间不能大于当前时间 """, dest='startTime', required=True)),
(['--end-time'], dict(help="""(string) 慢日志结束时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到当前时间不能大于 7 天,开始时间不能大于结束时间,结束时间不能大于当前时间 """, dest='endTime', required=True)),
(['--db-name'], dict(help="""(string) 查询哪个数据库的慢日志,不填表示返回所有数据库的慢日志 """, dest='dbName', required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,1000)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、30、50、100 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查询MySQL实例的慢日志的概要信息。<br>- 仅支持MySQL ''',
description='''
查询MySQL实例的慢日志的概要信息。<br>- 仅支持MySQL。
示例: jdc rds describe-slow-logs --instance-id xxx --start-time xxx --end-time xxx
''',
)
def describe_slow_logs(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeSlowLogsRequest import DescribeSlowLogsRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeSlowLogsRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--db'], dict(help="""(string) 需要查询的数据库名,多个数据库名之间用英文逗号分隔,默认所有数据库 """, dest='db', required=False)),
(['--threshold'], dict(help="""(int) 返回执行时间大于等于threshold的记录,默认10,单位秒 """, dest='threshold', type=int, required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,1000)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为50,取值范围:[1,100],只能为10的倍数 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 根据用户定义的查询条件,获取正在执行中的SQL执行的性能信息。用户可以根据这些信息查找与SQL执行相关的性能瓶颈,并进行优化。<br>- 仅支持SQL Server ''',
description='''
根据用户定义的查询条件,获取正在执行中的SQL执行的性能信息。用户可以根据这些信息查找与SQL执行相关的性能瓶颈,并进行优化。<br>- 仅支持SQL Server。
示例: jdc rds describe-active-query-performance --instance-id xxx
''',
)
def describe_active_query_performance(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeActiveQueryPerformanceRequest import DescribeActiveQueryPerformanceRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeActiveQueryPerformanceRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--start-time'], dict(help="""(string) 慢日志开始时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到当前时间不能大于 7 天,开始时间不能大于结束时间,结束时间不能大于当前时间 """, dest='startTime', required=True)),
(['--end-time'], dict(help="""(string) 慢日志结束时间,格式为:YYYY-MM-DD HH:mm:ss,开始时间到当前时间不能大于 7 天,开始时间不能大于结束时间,结束时间不能大于当前时间 """, dest='endTime', required=True)),
(['--db-name'], dict(help="""(string) 查询哪个数据库的慢日志,不填表示返回所有数据库的慢日志 """, dest='dbName', required=False)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,1000)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页。 """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为10,取值范围:10、20、30、50、100 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查询PostgreSQL实例的错误日志的概要信息。<br>- 仅支持PostgreSQL ''',
description='''
查询PostgreSQL实例的错误日志的概要信息。<br>- 仅支持PostgreSQL。
示例: jdc rds describe-error-log --instance-id xxx --start-time xxx --end-time xxx
''',
)
def describe_error_log(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeErrorLogRequest import DescribeErrorLogRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeErrorLogRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--db-name'], dict(help="""(string) 库名称 """, dest='dbName', required=True)),
(['--page-number'], dict(help="""(int) 显示数据的页码,默认为1,取值范围:[-1,∞)。pageNumber为-1时,返回所有数据页码;超过总页数时,显示最后一页; """, dest='pageNumber', type=int, required=False)),
(['--page-size'], dict(help="""(int) 每页显示的数据条数,默认为100,取值范围:[10,100],用于查询列表的接口 """, dest='pageSize', type=int, required=False)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 获取当前实例的指定库的表列表信息 - 仅支持 MySQL,Percona,MariaDB ''',
description='''
获取当前实例的指定库的表列表信息 - 仅支持 MySQL,Percona,MariaDB。
示例: jdc rds describe-tables --instance-id xxx --db-name xxx
''',
)
def describe_tables(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeTablesRequest import DescribeTablesRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeTablesRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看当前实例是否开启TDE ''',
description='''
查看当前实例是否开启TDE。
示例: jdc rds describe-tde --instance-id xxx
''',
)
def describe_tde(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeTdeRequest import DescribeTdeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeTdeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 开启数据库的TDE功能 ''',
description='''
开启数据库的TDE功能。
示例: jdc rds enable-tde --instance-id xxx
''',
)
def enable_tde(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.EnableTdeRequest import EnableTdeRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = EnableTdeRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
arguments=[
(['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
(['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
(['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
(['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
],
formatter_class=RawTextHelpFormatter,
help=''' 查看RDS实例当前白名单。白名单是允许访问当前实例的IP/IP段列表,缺省情况下,白名单对本VPC开放。如果用户开启了外网访问的功能,还需要对外网的IP配置白名单。 ''',
description='''
查看RDS实例当前白名单。白名单是允许访问当前实例的IP/IP段列表,缺省情况下,白名单对本VPC开放。如果用户开启了外网访问的功能,还需要对外网的IP配置白名单。。
示例: jdc rds describe-white-list --instance-id xxx
''',
)
def describe_white_list(self):
client_factory = ClientFactory('rds')
client = client_factory.get(self.app)
if client is None:
return
try:
from jdcloud_sdk.services.rds.apis.DescribeWhiteListRequest import DescribeWhiteListRequest
params_dict = collect_user_args(self.app)
headers = collect_user_headers(self.app)
req = DescribeWhiteListRequest(params_dict, headers)
resp = client.send(req)
Printer.print_result(resp)
except ImportError:
print('{"error":"This api is not supported, please use the newer version"}')
except Exception as e:
print(e)
@expose(
    arguments=[
        (['--region-id'], dict(help="""(string) 地域代码,取值范围参见[《各地域及可用区对照表》](../Enum-Definitions/Regions-AZ.md) """, dest='regionId', required=False)),
        (['--instance-id'], dict(help="""(string) RDS 实例ID,唯一标识一个RDS实例 """, dest='instanceId', required=True)),
        (['--ips'], dict(help="""(string) IP或IP段,不同的IP/IP段之间用英文逗号分隔,例如0.0.0.0/0,192.168.0.10 """, dest='ips', required=True)),
        (['--input-json'], dict(help='(json) 以json字符串或文件绝对路径形式作为输入参数。\n字符串方式举例:--input-json \'{"field":"value"}\';\n文件格式举例:--input-json file:///xxxx.json', dest='input_json', required=False)),
        (['--headers'], dict(help="""(json) 用户自定义Header,举例:'{"x-jdcloud-security-token":"abc","test":"123"}'""", dest='headers', required=False)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 修改允许访问实例的IP白名单。白名单是允许访问当前实例的IP/IP段列表,缺省情况下,白名单对本VPC开放。如果用户开启了外网访问的功能,还需要对外网的IP配置白名单。 ''',
    description='''
        修改允许访问实例的IP白名单。白名单是允许访问当前实例的IP/IP段列表,缺省情况下,白名单对本VPC开放。如果用户开启了外网访问的功能,还需要对外网的IP配置白名单。。
        示例: jdc rds modify-white-list --instance-id xxx --ips xxx
        ''',
)
def modify_white_list(self):
    """Replace the IP whitelist of an RDS instance and print the response.

    Resolves an rds client from the app context; if none is available
    (e.g. bad credentials/config) the command exits silently, matching
    the other generated commands in this controller.
    """
    factory = ClientFactory('rds')
    rds_client = factory.get(self.app)
    if rds_client is None:
        return
    try:
        from jdcloud_sdk.services.rds.apis.ModifyWhiteListRequest import ModifyWhiteListRequest
        args = collect_user_args(self.app)
        hdrs = collect_user_headers(self.app)
        Printer.print_result(rds_client.send(ModifyWhiteListRequest(args, hdrs)))
    except ImportError:
        # SDK shipped without this API — emit a machine-readable error.
        print('{"error":"This api is not supported, please use the newer version"}')
    except Exception as e:
        print(e)
@expose(
    arguments=[
        # NOTE: the choices entry 'describe-parameter-modify-records' was split
        # mid-token in the previous revision; rejoined here.
        (['--api'], dict(help="""(string) api name """, choices=['describe-accounts','create-account','describe-account-privilege','delete-account','grant-privilege','revoke-privilege','reset-password','create-super-account','grant-account-privilege','describe-audit','create-audit','delete-audit','describe-audit-options','modify-audit','describe-audit-files','describe-audit-download-url','enable-audit','disable-audit','describe-audit-result','describe-azs','describe-backups','create-backup','delete-backup','describe-backup-download-url','describe-backup-synchronicities','create-backup-synchronicity','delete-backup-synchronicity','create-instance-by-time-in-cross-region','describe-binlogs','describe-binlog-download-url','clear-binlogs','alter-table-with-online-ddl','describe-privilege','describe-databases','create-database','delete-database','restore-database-from-backup','restore-database-from-file','restore-database-from-oss','describe-error-logs','describe-import-files','get-upload-key','set-import-file-shared','delete-import-file','describe-instances','create-instance','describe-instance-attributes','delete-instance','describe-backup-policy','modify-backup-policy','modify-instance-name','failover-instance','reboot-instance','enable-internet-access','disable-internet-access','restore-instance','create-instance-from-backup','modify-instance-spec','create-instance-by-time','create-roinstance','modify-connection-mode','describe-latest-restore-time','modify-parameter-group','exchange-instance-dns','modify-instance-az','describe-ssl','enable-ssl','restore-instance-by-time','enable-intercept','disable-intercept','describe-intercept-result','describe-intercept','describe-logs','update-log-download-urlinternal','describe-log-download-url','describe-parameters','modify-parameters','delete-parameter-group','modify-parameter-group-attribute','describe-parameter-groups','create-parameter-group','describe-parameter-group-parameters','modify-parameter-group-parameters','describe-parameter-modify-records','copy-parameter-group','describe-parameter-group-attached-instances','describe-index-performance','describe-query-performance','describe-slow-log-attributes','describe-slow-logs','describe-active-query-performance','describe-error-log','describe-tables','describe-tde','enable-tde','describe-white-list','modify-white-list',], required=True)),
    ],
    formatter_class=RawTextHelpFormatter,
    help=''' 生成单个API接口的json骨架空字符串 ''',
    description='''
        生成单个API接口的json骨架空字符串。
        示例: jdc rds generate-skeleton --api describe-accounts ''',
)
def generate_skeleton(self):
    """Print an empty JSON skeleton for the selected rds API.

    The previous example wrongly referenced the `nc` service and a
    camelCase api name not present in `choices`; it now matches this
    controller's service ('rds') and kebab-case api names.
    """
    skeleton = Skeleton('rds', self.app.pargs.api)
    skeleton.show()
| 56.806036
| 2,375
| 0.618556
| 20,118
| 193,879
| 5.891391
| 0.056715
| 0.036989
| 0.033782
| 0.026594
| 0.824279
| 0.800393
| 0.77898
| 0.761877
| 0.748825
| 0.737241
| 0
| 0.006204
| 0.21352
| 193,879
| 3,412
| 2,376
| 56.822685
| 0.771002
| 0.00327
| 0
| 0.742498
| 0
| 0.064537
| 0.410117
| 0.191209
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031623
| false
| 0.003227
| 0.068732
| 0
| 0.132301
| 0.094224
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6b35ce69c9cba33721d4e5807528af3fc2343542
| 58,043
|
py
|
Python
|
vargfacenet_tensorflow.py
|
pmathewjacob/insightface-attendance
|
447b669e3d176bb1c78a6108334d6470a8fb25a8
|
[
"MIT"
] | 5
|
2019-12-02T03:32:29.000Z
|
2020-06-14T19:22:40.000Z
|
vargfacenet_tensorflow.py
|
pmathewjacob/insightface-attendance
|
447b669e3d176bb1c78a6108334d6470a8fb25a8
|
[
"MIT"
] | 13
|
2020-03-24T17:53:27.000Z
|
2022-02-10T00:49:34.000Z
|
vargfacenet_tensorflow.py
|
pmathewjacob/insightface-attendance
|
447b669e3d176bb1c78a6108334d6470a8fb25a8
|
[
"MIT"
] | 6
|
2019-12-02T16:35:57.000Z
|
2020-08-21T08:44:27.000Z
|
import tensorflow as tf
# Module-level cache for the checkpoint weights; populated by KitModel()
# via load_weights() and read by the layer-builder helpers.
__weights_dict = dict()

# presumably selects training-mode behavior (e.g. for batch norm); fixed to
# False for this inference-only graph — TODO confirm against the helpers.
is_train = False
def load_weights(weight_file):
    """Load a serialized weights dictionary from *weight_file*.

    The file is a NumPy ``.npy`` archive holding a single pickled dict
    (an MMdnn-style checkpoint: layer name -> {'value': ...} entries,
    judging by KitModel's lookups). Returns ``None`` when *weight_file*
    is ``None``.

    Fixes vs. previous revision:
      * ``weight_file == None`` -> ``is None`` (identity check for None);
      * ``allow_pickle=True`` is passed explicitly — NumPy >= 1.16.3
        defaults it to False, so loading a pickled dict raised
        ValueError in both the original try and its fallback;
      * the bare ``except:`` no longer swallows KeyboardInterrupt/SystemExit.
    """
    import numpy as np

    if weight_file is None:
        return None

    try:
        weights_dict = np.load(weight_file, allow_pickle=True).item()
    except Exception:
        # Fallback for checkpoints pickled under Python 2: decode str
        # objects as bytes on load.
        weights_dict = np.load(weight_file, encoding='bytes',
                               allow_pickle=True).item()

    return weights_dict
def KitModel(weight_file = None):
global __weights_dict
__weights_dict = load_weights(weight_file)
print(__weights_dict)
data = tf.placeholder(tf.float32, shape = (None, 112, 112, 3), name = 'data')
minusscalar0_second = tf.constant(__weights_dict['minusscalar0_second']['value'], dtype=tf.float32, name='minusscalar0_second')
mulscalar0_second = tf.constant(__weights_dict['mulscalar0_second']['value'], dtype=tf.float32, name='mulscalar0_second')
minusscalar0 = data - minusscalar0_second
mulscalar0 = minusscalar0 * mulscalar0_second
vargface_head_conv1_pad = tf.pad(mulscalar0, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_head_conv1 = convolution(vargface_head_conv1_pad, group=1, strides=[1, 1], padding='VALID', name='vargface_head_conv1')
vargface_head_conv1_bn = batch_normalization(vargface_head_conv1, variance_epsilon=1.9999999494757503e-05, name='vargface_head_conv1_bn')
vargface_head_conv1_act = prelu(vargface_head_conv1_bn, name='vargface_head_conv1_act')
vargface_head_head_pooling_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_head_conv1_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_head_head_pooling_sep1_data_conv2d_depthwise = convolution(vargface_head_head_pooling_sep1_data_conv2d_depthwise_pad, group=5, strides=[2, 2], padding='VALID', name='vargface_head_head_pooling_sep1_data_conv2d_depthwise')
vargface_head_head_pooling_shortcut_conv2d_depthwise_pad = tf.pad(vargface_head_conv1_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_head_head_pooling_shortcut_conv2d_depthwise = convolution(vargface_head_head_pooling_shortcut_conv2d_depthwise_pad, group=5, strides=[2, 2], padding='VALID', name='vargface_head_head_pooling_shortcut_conv2d_depthwise')
vargface_head_head_pooling_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_head_head_pooling_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_head_head_pooling_sep1_data_conv2d_depthwise_bn')
vargface_head_head_pooling_shortcut_conv2d_depthwise_bn = batch_normalization(vargface_head_head_pooling_shortcut_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_head_head_pooling_shortcut_conv2d_depthwise_bn')
vargface_head_head_pooling_sep1_data_conv2d_depthwise_act = prelu(vargface_head_head_pooling_sep1_data_conv2d_depthwise_bn, name='vargface_head_head_pooling_sep1_data_conv2d_depthwise_act')
vargface_head_head_pooling_shortcut_conv2d_depthwise_act = prelu(vargface_head_head_pooling_shortcut_conv2d_depthwise_bn, name='vargface_head_head_pooling_shortcut_conv2d_depthwise_act')
vargface_head_head_pooling_sep1_data_conv2d_pointwise = convolution(vargface_head_head_pooling_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_head_head_pooling_sep1_data_conv2d_pointwise')
vargface_head_head_pooling_shortcut_conv2d_pointwise = convolution(vargface_head_head_pooling_shortcut_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_head_head_pooling_shortcut_conv2d_pointwise')
vargface_head_head_pooling_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_head_head_pooling_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_head_head_pooling_sep1_data_conv2d_pointwise_bn')
vargface_head_head_pooling_shortcut_conv2d_pointwise_bn = batch_normalization(vargface_head_head_pooling_shortcut_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_head_head_pooling_shortcut_conv2d_pointwise_bn')
vargface_head_head_pooling_sep1_data_conv2d_pointwise_act = prelu(vargface_head_head_pooling_sep1_data_conv2d_pointwise_bn, name='vargface_head_head_pooling_sep1_data_conv2d_pointwise_act')
vargface_head_head_pooling_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_head_head_pooling_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_head_head_pooling_sep2_data_conv2d_depthwise = convolution(vargface_head_head_pooling_sep2_data_conv2d_depthwise_pad, group=5, strides=[1, 1], padding='VALID', name='vargface_head_head_pooling_sep2_data_conv2d_depthwise')
vargface_head_head_pooling_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_head_head_pooling_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_head_head_pooling_sep2_data_conv2d_depthwise_bn')
vargface_head_head_pooling_sep2_data_conv2d_depthwise_act = prelu(vargface_head_head_pooling_sep2_data_conv2d_depthwise_bn, name='vargface_head_head_pooling_sep2_data_conv2d_depthwise_act')
vargface_head_head_pooling_sep2_data_conv2d_pointwise = convolution(vargface_head_head_pooling_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_head_head_pooling_sep2_data_conv2d_pointwise')
vargface_head_head_pooling_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_head_head_pooling_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_head_head_pooling_sep2_data_conv2d_pointwise_bn')
plus0 = vargface_head_head_pooling_sep2_data_conv2d_pointwise_bn + vargface_head_head_pooling_shortcut_conv2d_pointwise_bn
vargface_head_head_pooling_out_data_act = prelu(plus0, name='vargface_head_head_pooling_out_data_act')
vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_pad = tf.pad(vargface_head_head_pooling_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise = convolution(vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_pad, group=5, strides=[2, 2], padding='VALID', name='vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise')
vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_pad = tf.pad(vargface_head_head_pooling_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise = convolution(vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_pad, group=5, strides=[2, 2], padding='VALID', name='vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise')
vargface_stage_2_unit_1_shortcut_conv2d_depthwise_pad = tf.pad(vargface_head_head_pooling_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_1_shortcut_conv2d_depthwise = convolution(vargface_stage_2_unit_1_shortcut_conv2d_depthwise_pad, group=5, strides=[2, 2], padding='VALID', name='vargface_stage_2_unit_1_shortcut_conv2d_depthwise')
vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_bn')
vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_bn')
vargface_stage_2_unit_1_shortcut_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_1_shortcut_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_shortcut_conv2d_depthwise_bn')
vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_act = prelu(vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_bn, name='vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_act')
vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_act = prelu(vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_bn, name='vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_act')
vargface_stage_2_unit_1_shortcut_conv2d_depthwise_act = prelu(vargface_stage_2_unit_1_shortcut_conv2d_depthwise_bn, name='vargface_stage_2_unit_1_shortcut_conv2d_depthwise_act')
vargface_stage_2_unit_1_sep1_data_branch_conv2d_pointwise = convolution(vargface_stage_2_unit_1_sep1_data_branch_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_1_sep1_data_branch_conv2d_pointwise')
vargface_stage_2_unit_1_sep2_data_branch_conv2d_pointwise = convolution(vargface_stage_2_unit_1_sep2_data_branch_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_1_sep2_data_branch_conv2d_pointwise')
vargface_stage_2_unit_1_shortcut_conv2d_pointwise = convolution(vargface_stage_2_unit_1_shortcut_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_1_shortcut_conv2d_pointwise')
vargface_stage_2_unit_1_sep1_data_branch_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_1_sep1_data_branch_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_sep1_data_branch_conv2d_pointwise_bn')
vargface_stage_2_unit_1_sep2_data_branch_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_1_sep2_data_branch_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_sep2_data_branch_conv2d_pointwise_bn')
vargface_stage_2_unit_1_shortcut_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_1_shortcut_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_shortcut_conv2d_pointwise_bn')
plus1 = vargface_stage_2_unit_1_sep1_data_branch_conv2d_pointwise_bn + vargface_stage_2_unit_1_sep2_data_branch_conv2d_pointwise_bn
vargface_stage_2_unit_1_sep1_data_act = prelu(plus1, name='vargface_stage_2_unit_1_sep1_data_act')
vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_1_sep1_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_1_sep2_data_conv2d_depthwise = convolution(vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_pad, group=10, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_1_sep2_data_conv2d_depthwise')
vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_1_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_bn')
vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_bn, name='vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_act')
vargface_stage_2_unit_1_sep2_data_conv2d_pointwise = convolution(vargface_stage_2_unit_1_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_1_sep2_data_conv2d_pointwise')
vargface_stage_2_unit_1_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_1_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_1_sep2_data_conv2d_pointwise_bn')
plus2 = vargface_stage_2_unit_1_sep2_data_conv2d_pointwise_bn + vargface_stage_2_unit_1_shortcut_conv2d_pointwise_bn
vargface_stage_2_unit_1_out_data_act = prelu(plus2, name='vargface_stage_2_unit_1_out_data_act')
vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_1_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_2_sep1_data_conv2d_depthwise = convolution(vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_pad, group=10, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_2_sep1_data_conv2d_depthwise')
vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_2_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_bn')
vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_bn, name='vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_act')
vargface_stage_2_unit_2_sep1_data_conv2d_pointwise = convolution(vargface_stage_2_unit_2_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_2_sep1_data_conv2d_pointwise')
vargface_stage_2_unit_2_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_2_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_2_sep1_data_conv2d_pointwise_bn')
vargface_stage_2_unit_2_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_2_unit_2_sep1_data_conv2d_pointwise_bn, name='vargface_stage_2_unit_2_sep1_data_conv2d_pointwise_act')
vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_2_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_2_sep2_data_conv2d_depthwise = convolution(vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_pad, group=10, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_2_sep2_data_conv2d_depthwise')
vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_2_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_bn')
vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_bn, name='vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_act')
vargface_stage_2_unit_2_sep2_data_conv2d_pointwise = convolution(vargface_stage_2_unit_2_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_2_sep2_data_conv2d_pointwise')
vargface_stage_2_unit_2_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_2_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_2_sep2_data_conv2d_pointwise_bn')
plus3 = vargface_stage_2_unit_2_sep2_data_conv2d_pointwise_bn + vargface_stage_2_unit_1_out_data_act
vargface_stage_2_unit_2_out_data_act = prelu(plus3, name='vargface_stage_2_unit_2_out_data_act')
vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_2_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_3_sep1_data_conv2d_depthwise = convolution(vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_pad, group=10, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_3_sep1_data_conv2d_depthwise')
vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_3_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_bn')
vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_bn, name='vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_act')
vargface_stage_2_unit_3_sep1_data_conv2d_pointwise = convolution(vargface_stage_2_unit_3_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_3_sep1_data_conv2d_pointwise')
vargface_stage_2_unit_3_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_3_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_3_sep1_data_conv2d_pointwise_bn')
vargface_stage_2_unit_3_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_2_unit_3_sep1_data_conv2d_pointwise_bn, name='vargface_stage_2_unit_3_sep1_data_conv2d_pointwise_act')
vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_3_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_2_unit_3_sep2_data_conv2d_depthwise = convolution(vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_pad, group=10, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_3_sep2_data_conv2d_depthwise')
vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_2_unit_3_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_bn')
vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_bn, name='vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_act')
vargface_stage_2_unit_3_sep2_data_conv2d_pointwise = convolution(vargface_stage_2_unit_3_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_2_unit_3_sep2_data_conv2d_pointwise')
vargface_stage_2_unit_3_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_2_unit_3_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_2_unit_3_sep2_data_conv2d_pointwise_bn')
plus4 = vargface_stage_2_unit_3_sep2_data_conv2d_pointwise_bn + vargface_stage_2_unit_2_out_data_act
vargface_stage_2_unit_3_out_data_act = prelu(plus4, name='vargface_stage_2_unit_3_out_data_act')
vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_3_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise = convolution(vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_pad, group=10, strides=[2, 2], padding='VALID', name='vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise')
vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_3_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise = convolution(vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_pad, group=10, strides=[2, 2], padding='VALID', name='vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise')
vargface_stage_3_unit_1_shortcut_conv2d_depthwise_pad = tf.pad(vargface_stage_2_unit_3_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_1_shortcut_conv2d_depthwise = convolution(vargface_stage_3_unit_1_shortcut_conv2d_depthwise_pad, group=10, strides=[2, 2], padding='VALID', name='vargface_stage_3_unit_1_shortcut_conv2d_depthwise')
vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_bn')
vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_bn')
vargface_stage_3_unit_1_shortcut_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_1_shortcut_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_shortcut_conv2d_depthwise_bn')
vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_act = prelu(vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_bn, name='vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_act')
vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_act = prelu(vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_bn, name='vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_act')
vargface_stage_3_unit_1_shortcut_conv2d_depthwise_act = prelu(vargface_stage_3_unit_1_shortcut_conv2d_depthwise_bn, name='vargface_stage_3_unit_1_shortcut_conv2d_depthwise_act')
vargface_stage_3_unit_1_sep1_data_branch_conv2d_pointwise = convolution(vargface_stage_3_unit_1_sep1_data_branch_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_1_sep1_data_branch_conv2d_pointwise')
vargface_stage_3_unit_1_sep2_data_branch_conv2d_pointwise = convolution(vargface_stage_3_unit_1_sep2_data_branch_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_1_sep2_data_branch_conv2d_pointwise')
vargface_stage_3_unit_1_shortcut_conv2d_pointwise = convolution(vargface_stage_3_unit_1_shortcut_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_1_shortcut_conv2d_pointwise')
vargface_stage_3_unit_1_sep1_data_branch_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_1_sep1_data_branch_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_sep1_data_branch_conv2d_pointwise_bn')
vargface_stage_3_unit_1_sep2_data_branch_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_1_sep2_data_branch_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_sep2_data_branch_conv2d_pointwise_bn')
vargface_stage_3_unit_1_shortcut_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_1_shortcut_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_shortcut_conv2d_pointwise_bn')
plus5 = vargface_stage_3_unit_1_sep1_data_branch_conv2d_pointwise_bn + vargface_stage_3_unit_1_sep2_data_branch_conv2d_pointwise_bn
vargface_stage_3_unit_1_sep1_data_act = prelu(plus5, name='vargface_stage_3_unit_1_sep1_data_act')
vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_1_sep1_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_1_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_1_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_1_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_1_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_1_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_1_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_1_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_1_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_1_sep2_data_conv2d_pointwise_bn')
plus6 = vargface_stage_3_unit_1_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_1_shortcut_conv2d_pointwise_bn
vargface_stage_3_unit_1_out_data_act = prelu(plus6, name='vargface_stage_3_unit_1_out_data_act')
vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_1_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_2_sep1_data_conv2d_depthwise = convolution(vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_2_sep1_data_conv2d_depthwise')
vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_2_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_bn')
vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_act')
vargface_stage_3_unit_2_sep1_data_conv2d_pointwise = convolution(vargface_stage_3_unit_2_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_2_sep1_data_conv2d_pointwise')
vargface_stage_3_unit_2_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_2_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_2_sep1_data_conv2d_pointwise_bn')
vargface_stage_3_unit_2_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_3_unit_2_sep1_data_conv2d_pointwise_bn, name='vargface_stage_3_unit_2_sep1_data_conv2d_pointwise_act')
vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_2_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_2_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_2_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_2_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_2_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_2_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_2_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_2_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_2_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_2_sep2_data_conv2d_pointwise_bn')
plus7 = vargface_stage_3_unit_2_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_1_out_data_act
vargface_stage_3_unit_2_out_data_act = prelu(plus7, name='vargface_stage_3_unit_2_out_data_act')
vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_2_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_3_sep1_data_conv2d_depthwise = convolution(vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_3_sep1_data_conv2d_depthwise')
vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_3_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_bn')
vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_act')
vargface_stage_3_unit_3_sep1_data_conv2d_pointwise = convolution(vargface_stage_3_unit_3_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_3_sep1_data_conv2d_pointwise')
vargface_stage_3_unit_3_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_3_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_3_sep1_data_conv2d_pointwise_bn')
vargface_stage_3_unit_3_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_3_unit_3_sep1_data_conv2d_pointwise_bn, name='vargface_stage_3_unit_3_sep1_data_conv2d_pointwise_act')
vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_3_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_3_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_3_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_3_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_3_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_3_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_3_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_3_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_3_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_3_sep2_data_conv2d_pointwise_bn')
plus8 = vargface_stage_3_unit_3_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_2_out_data_act
vargface_stage_3_unit_3_out_data_act = prelu(plus8, name='vargface_stage_3_unit_3_out_data_act')
vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_3_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_4_sep1_data_conv2d_depthwise = convolution(vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_4_sep1_data_conv2d_depthwise')
vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_4_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_bn')
vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_act')
vargface_stage_3_unit_4_sep1_data_conv2d_pointwise = convolution(vargface_stage_3_unit_4_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_4_sep1_data_conv2d_pointwise')
vargface_stage_3_unit_4_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_4_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_4_sep1_data_conv2d_pointwise_bn')
vargface_stage_3_unit_4_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_3_unit_4_sep1_data_conv2d_pointwise_bn, name='vargface_stage_3_unit_4_sep1_data_conv2d_pointwise_act')
vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_4_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_4_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_4_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_4_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_4_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_4_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_4_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_4_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_4_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_4_sep2_data_conv2d_pointwise_bn')
plus9 = vargface_stage_3_unit_4_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_3_out_data_act
vargface_stage_3_unit_4_out_data_act = prelu(plus9, name='vargface_stage_3_unit_4_out_data_act')
vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_4_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_5_sep1_data_conv2d_depthwise = convolution(vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_5_sep1_data_conv2d_depthwise')
vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_5_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_bn')
vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_act')
vargface_stage_3_unit_5_sep1_data_conv2d_pointwise = convolution(vargface_stage_3_unit_5_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_5_sep1_data_conv2d_pointwise')
vargface_stage_3_unit_5_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_5_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_5_sep1_data_conv2d_pointwise_bn')
vargface_stage_3_unit_5_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_3_unit_5_sep1_data_conv2d_pointwise_bn, name='vargface_stage_3_unit_5_sep1_data_conv2d_pointwise_act')
vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_5_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_5_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_5_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_5_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_5_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_5_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_5_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_5_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_5_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_5_sep2_data_conv2d_pointwise_bn')
plus10 = vargface_stage_3_unit_5_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_4_out_data_act
vargface_stage_3_unit_5_out_data_act = prelu(plus10, name='vargface_stage_3_unit_5_out_data_act')
vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_5_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_6_sep1_data_conv2d_depthwise = convolution(vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_6_sep1_data_conv2d_depthwise')
vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_6_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_bn')
vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_act')
vargface_stage_3_unit_6_sep1_data_conv2d_pointwise = convolution(vargface_stage_3_unit_6_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_6_sep1_data_conv2d_pointwise')
vargface_stage_3_unit_6_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_6_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_6_sep1_data_conv2d_pointwise_bn')
vargface_stage_3_unit_6_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_3_unit_6_sep1_data_conv2d_pointwise_bn, name='vargface_stage_3_unit_6_sep1_data_conv2d_pointwise_act')
vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_6_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_6_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_6_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_6_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_6_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_6_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_6_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_6_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_6_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_6_sep2_data_conv2d_pointwise_bn')
plus11 = vargface_stage_3_unit_6_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_5_out_data_act
vargface_stage_3_unit_6_out_data_act = prelu(plus11, name='vargface_stage_3_unit_6_out_data_act')
vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_6_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_7_sep1_data_conv2d_depthwise = convolution(vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_7_sep1_data_conv2d_depthwise')
vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_7_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_bn')
vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_act')
vargface_stage_3_unit_7_sep1_data_conv2d_pointwise = convolution(vargface_stage_3_unit_7_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_7_sep1_data_conv2d_pointwise')
vargface_stage_3_unit_7_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_7_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_7_sep1_data_conv2d_pointwise_bn')
vargface_stage_3_unit_7_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_3_unit_7_sep1_data_conv2d_pointwise_bn, name='vargface_stage_3_unit_7_sep1_data_conv2d_pointwise_act')
vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_7_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_3_unit_7_sep2_data_conv2d_depthwise = convolution(vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_pad, group=20, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_7_sep2_data_conv2d_depthwise')
vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_3_unit_7_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_bn')
vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_bn, name='vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_act')
vargface_stage_3_unit_7_sep2_data_conv2d_pointwise = convolution(vargface_stage_3_unit_7_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_3_unit_7_sep2_data_conv2d_pointwise')
vargface_stage_3_unit_7_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_3_unit_7_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_3_unit_7_sep2_data_conv2d_pointwise_bn')
plus12 = vargface_stage_3_unit_7_sep2_data_conv2d_pointwise_bn + vargface_stage_3_unit_6_out_data_act
vargface_stage_3_unit_7_out_data_act = prelu(plus12, name='vargface_stage_3_unit_7_out_data_act')
vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_7_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise = convolution(vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_pad, group=20, strides=[2, 2], padding='VALID', name='vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise')
vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_7_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise = convolution(vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_pad, group=20, strides=[2, 2], padding='VALID', name='vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise')
vargface_stage_4_unit_1_shortcut_conv2d_depthwise_pad = tf.pad(vargface_stage_3_unit_7_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_1_shortcut_conv2d_depthwise = convolution(vargface_stage_4_unit_1_shortcut_conv2d_depthwise_pad, group=20, strides=[2, 2], padding='VALID', name='vargface_stage_4_unit_1_shortcut_conv2d_depthwise')
vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_bn')
vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_bn')
vargface_stage_4_unit_1_shortcut_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_1_shortcut_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_shortcut_conv2d_depthwise_bn')
vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_act = prelu(vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_bn, name='vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_act')
vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_act = prelu(vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_bn, name='vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_act')
vargface_stage_4_unit_1_shortcut_conv2d_depthwise_act = prelu(vargface_stage_4_unit_1_shortcut_conv2d_depthwise_bn, name='vargface_stage_4_unit_1_shortcut_conv2d_depthwise_act')
vargface_stage_4_unit_1_sep1_data_branch_conv2d_pointwise = convolution(vargface_stage_4_unit_1_sep1_data_branch_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_1_sep1_data_branch_conv2d_pointwise')
vargface_stage_4_unit_1_sep2_data_branch_conv2d_pointwise = convolution(vargface_stage_4_unit_1_sep2_data_branch_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_1_sep2_data_branch_conv2d_pointwise')
vargface_stage_4_unit_1_shortcut_conv2d_pointwise = convolution(vargface_stage_4_unit_1_shortcut_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_1_shortcut_conv2d_pointwise')
vargface_stage_4_unit_1_sep1_data_branch_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_1_sep1_data_branch_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_sep1_data_branch_conv2d_pointwise_bn')
vargface_stage_4_unit_1_sep2_data_branch_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_1_sep2_data_branch_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_sep2_data_branch_conv2d_pointwise_bn')
vargface_stage_4_unit_1_shortcut_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_1_shortcut_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_shortcut_conv2d_pointwise_bn')
plus13 = vargface_stage_4_unit_1_sep1_data_branch_conv2d_pointwise_bn + vargface_stage_4_unit_1_sep2_data_branch_conv2d_pointwise_bn
vargface_stage_4_unit_1_sep1_data_act = prelu(plus13, name='vargface_stage_4_unit_1_sep1_data_act')
vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_1_sep1_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_1_sep2_data_conv2d_depthwise = convolution(vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_1_sep2_data_conv2d_depthwise')
vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_1_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_bn')
vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_act')
vargface_stage_4_unit_1_sep2_data_conv2d_pointwise = convolution(vargface_stage_4_unit_1_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_1_sep2_data_conv2d_pointwise')
vargface_stage_4_unit_1_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_1_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_1_sep2_data_conv2d_pointwise_bn')
plus14 = vargface_stage_4_unit_1_sep2_data_conv2d_pointwise_bn + vargface_stage_4_unit_1_shortcut_conv2d_pointwise_bn
vargface_stage_4_unit_1_out_data_act = prelu(plus14, name='vargface_stage_4_unit_1_out_data_act')
vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_1_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_2_sep1_data_conv2d_depthwise = convolution(vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_2_sep1_data_conv2d_depthwise')
vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_2_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_bn')
vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_act')
vargface_stage_4_unit_2_sep1_data_conv2d_pointwise = convolution(vargface_stage_4_unit_2_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_2_sep1_data_conv2d_pointwise')
vargface_stage_4_unit_2_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_2_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_2_sep1_data_conv2d_pointwise_bn')
vargface_stage_4_unit_2_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_4_unit_2_sep1_data_conv2d_pointwise_bn, name='vargface_stage_4_unit_2_sep1_data_conv2d_pointwise_act')
vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_2_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_2_sep2_data_conv2d_depthwise = convolution(vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_2_sep2_data_conv2d_depthwise')
vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_2_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_bn')
vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_act')
vargface_stage_4_unit_2_sep2_data_conv2d_pointwise = convolution(vargface_stage_4_unit_2_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_2_sep2_data_conv2d_pointwise')
vargface_stage_4_unit_2_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_2_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_2_sep2_data_conv2d_pointwise_bn')
plus15 = vargface_stage_4_unit_2_sep2_data_conv2d_pointwise_bn + vargface_stage_4_unit_1_out_data_act
vargface_stage_4_unit_2_out_data_act = prelu(plus15, name='vargface_stage_4_unit_2_out_data_act')
vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_2_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_3_sep1_data_conv2d_depthwise = convolution(vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_3_sep1_data_conv2d_depthwise')
vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_3_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_bn')
vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_act')
vargface_stage_4_unit_3_sep1_data_conv2d_pointwise = convolution(vargface_stage_4_unit_3_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_3_sep1_data_conv2d_pointwise')
vargface_stage_4_unit_3_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_3_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_3_sep1_data_conv2d_pointwise_bn')
vargface_stage_4_unit_3_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_4_unit_3_sep1_data_conv2d_pointwise_bn, name='vargface_stage_4_unit_3_sep1_data_conv2d_pointwise_act')
vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_3_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_3_sep2_data_conv2d_depthwise = convolution(vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_3_sep2_data_conv2d_depthwise')
vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_3_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_bn')
vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_act')
vargface_stage_4_unit_3_sep2_data_conv2d_pointwise = convolution(vargface_stage_4_unit_3_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_3_sep2_data_conv2d_pointwise')
vargface_stage_4_unit_3_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_3_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_3_sep2_data_conv2d_pointwise_bn')
plus16 = vargface_stage_4_unit_3_sep2_data_conv2d_pointwise_bn + vargface_stage_4_unit_2_out_data_act
vargface_stage_4_unit_3_out_data_act = prelu(plus16, name='vargface_stage_4_unit_3_out_data_act')
vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_3_out_data_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_4_sep1_data_conv2d_depthwise = convolution(vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_4_sep1_data_conv2d_depthwise')
vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_4_sep1_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_bn')
vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_act')
vargface_stage_4_unit_4_sep1_data_conv2d_pointwise = convolution(vargface_stage_4_unit_4_sep1_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_4_sep1_data_conv2d_pointwise')
vargface_stage_4_unit_4_sep1_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_4_sep1_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_4_sep1_data_conv2d_pointwise_bn')
vargface_stage_4_unit_4_sep1_data_conv2d_pointwise_act = prelu(vargface_stage_4_unit_4_sep1_data_conv2d_pointwise_bn, name='vargface_stage_4_unit_4_sep1_data_conv2d_pointwise_act')
vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_pad = tf.pad(vargface_stage_4_unit_4_sep1_data_conv2d_pointwise_act, paddings = [[0, 0], [1, 1], [1, 1], [0, 0]])
vargface_stage_4_unit_4_sep2_data_conv2d_depthwise = convolution(vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_pad, group=40, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_4_sep2_data_conv2d_depthwise')
vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_bn = batch_normalization(vargface_stage_4_unit_4_sep2_data_conv2d_depthwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_bn')
vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_act = prelu(vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_bn, name='vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_act')
vargface_stage_4_unit_4_sep2_data_conv2d_pointwise = convolution(vargface_stage_4_unit_4_sep2_data_conv2d_depthwise_act, group=1, strides=[1, 1], padding='VALID', name='vargface_stage_4_unit_4_sep2_data_conv2d_pointwise')
vargface_stage_4_unit_4_sep2_data_conv2d_pointwise_bn = batch_normalization(vargface_stage_4_unit_4_sep2_data_conv2d_pointwise, variance_epsilon=1.9999999494757503e-05, name='vargface_stage_4_unit_4_sep2_data_conv2d_pointwise_bn')
plus17 = vargface_stage_4_unit_4_sep2_data_conv2d_pointwise_bn + vargface_stage_4_unit_3_out_data_act
vargface_stage_4_unit_4_out_data_act = prelu(plus17, name='vargface_stage_4_unit_4_out_data_act')
embed_convx = convolution(vargface_stage_4_unit_4_out_data_act, group=1, strides=[1, 1], padding='VALID', name='embed_convx')
embed_convx_bn = batch_normalization(embed_convx, variance_epsilon=1.9999999494757503e-05, name='embed_convx_bn')
embed_convx_act = prelu(embed_convx_bn, name='embed_convx_act')
embed_convx_depthwise = convolution(embed_convx_act, group=128, strides=[1, 1], padding='VALID', name='embed_convx_depthwise')
embed_convx_depthwise_bn = batch_normalization(embed_convx_depthwise, variance_epsilon=1.9999999494757503e-05, name='embed_convx_depthwise_bn')
embed_convx_pointwise = convolution(embed_convx_depthwise_bn, group=1, strides=[1, 1], padding='VALID', name='embed_convx_pointwise')
embed_convx_pointwise_bn = batch_normalization(embed_convx_pointwise, variance_epsilon=1.9999999494757503e-05, name='embed_convx_pointwise_bn')
embed_convx_pointwise_act = prelu(embed_convx_pointwise_bn, name='embed_convx_pointwise_act')
pre_fc1_flatten = tf.contrib.layers.flatten(embed_convx_pointwise_act)
pre_fc1 = tf.layers.dense(pre_fc1_flatten, 512, use_bias = True)
fc1 = batch_normalization(pre_fc1, variance_epsilon=1.9999999494757503e-05, name='fc1')
return data, fc1
def prelu(input, name):
    """Parametric ReLU: max(0, x) + gamma * min(0, x).

    The slope ``gamma`` is loaded from the global ``__weights_dict`` under
    the layer's name and registered as a TF variable (trainable iff the
    global ``is_train`` is set).
    """
    slope = tf.Variable(
        __weights_dict[name]['gamma'],
        name=name + "_gamma",
        trainable=is_train,
    )
    positive_part = tf.maximum(0.0, input)
    negative_part = tf.minimum(0.0, input)
    return positive_part + slope * negative_part
def batch_normalization(input, name, **kwargs):
    """Inference-mode batch norm using statistics stored in ``__weights_dict``.

    Offset ('bias') and scale are optional: when missing from the weight
    dict they are passed to TF as ``None``. Extra kwargs (e.g.
    ``variance_epsilon``) are forwarded to ``tf.nn.batch_normalization``.
    """
    params = __weights_dict[name]

    def _load(key, suffix):
        # Wrap a stored array as a TF variable named after the layer.
        return tf.Variable(params[key], name=name + suffix, trainable=is_train)

    mean = _load('mean', "_mean")
    variance = _load('var', "_var")
    offset = _load('bias', "_bias") if 'bias' in params else None
    scale = _load('scale', "_scale") if 'scale' in params else None
    return tf.nn.batch_normalization(
        input, mean, variance, offset, scale, name=name, **kwargs
    )
def convolution(input, name, group, **kwargs):
    """Convolution with optional channel groups, weights from ``__weights_dict``.

    For ``group > 1`` the input and the kernel are split along the channel
    axis, each slice is convolved independently, and the per-group results
    are concatenated back together. A bias is added when present in the
    weight dict. Remaining kwargs (strides, padding, ...) go straight to
    ``tf.nn.convolution``.
    """
    weights = __weights_dict[name]
    kernel = tf.Variable(weights['weights'], trainable=is_train, name=name + "_weight")

    if group == 1:
        layer = tf.nn.convolution(input, kernel, name=name, **kwargs)
    else:
        kernel_slices = tf.split(kernel, num_or_size_splits=group, axis=-1)
        input_slices = tf.split(input, num_or_size_splits=group, axis=-1)
        pieces = [
            tf.nn.convolution(part, k, name=name, **kwargs)
            for part, k in zip(input_slices, kernel_slices)
        ]
        layer = tf.concat(pieces, axis=-1)

    if 'bias' in weights:
        bias = tf.Variable(weights['bias'], trainable=is_train, name=name + "_bias")
        layer = layer + bias
    return layer
| 166.79023
| 255
| 0.866857
| 9,379
| 58,043
| 4.663184
| 0.014927
| 0.20123
| 0.103073
| 0.132522
| 0.957907
| 0.949287
| 0.938289
| 0.934105
| 0.90765
| 0.832632
| 0
| 0.088411
| 0.063849
| 58,043
| 347
| 256
| 167.270893
| 0.716492
| 0
| 0
| 0
| 0
| 0
| 0.205816
| 0.195031
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015152
| false
| 0
| 0.006061
| 0
| 0.039394
| 0.00303
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
863d4b25e492b0a1bd0fed7c7676386eedd815db
| 273
|
py
|
Python
|
src/dp_json_util/__init__.py
|
dataPuzzler/dp_json_util
|
6ed5f897c76187d65eb502ffe52934bb9c1febac
|
[
"BSD-2-Clause"
] | null | null | null |
src/dp_json_util/__init__.py
|
dataPuzzler/dp_json_util
|
6ed5f897c76187d65eb502ffe52934bb9c1febac
|
[
"BSD-2-Clause"
] | null | null | null |
src/dp_json_util/__init__.py
|
dataPuzzler/dp_json_util
|
6ed5f897c76187d65eb502ffe52934bb9c1febac
|
[
"BSD-2-Clause"
] | null | null | null |
from .retriever import (
    InvalidJsonLocationDirPathException,
    JsonRetriever,
    JsonSchemaRetriever,
    UnsetJsonLocation,
    UnsetJsonLocationException,
)

# Public API of the package.
# BUG FIX: __all__ must contain *names* (strings), not the objects
# themselves -- `from dp_json_util import *` and tooling (linters, help())
# consult these as strings. The previous list held the classes directly.
__all__ = [
    "JsonRetriever",
    "JsonSchemaRetriever",
    "UnsetJsonLocation",
    "UnsetJsonLocationException",
    "InvalidJsonLocationDirPathException",
]
| 91
| 141
| 0.901099
| 14
| 273
| 17.285714
| 0.642857
| 0.264463
| 0.404959
| 0.619835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054945
| 273
| 3
| 142
| 91
| 0.937985
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
864965367b8eb3e89f4b3cc9d7ca095927df7e3c
| 4,425
|
py
|
Python
|
tests/libs/test_redispersistence.py
|
jamesperes/autonomiaBot
|
bbc90497d29fa16a85477281f87ceaeb7c3e55fc
|
[
"MIT"
] | 8
|
2018-03-22T21:46:30.000Z
|
2021-04-07T07:06:05.000Z
|
tests/libs/test_redispersistence.py
|
jamesperes/autonomiaBot
|
bbc90497d29fa16a85477281f87ceaeb7c3e55fc
|
[
"MIT"
] | 42
|
2018-03-23T17:20:38.000Z
|
2021-06-02T01:19:51.000Z
|
tests/libs/test_redispersistence.py
|
jamesperes/autonomiaBot
|
bbc90497d29fa16a85477281f87ceaeb7c3e55fc
|
[
"MIT"
] | 3
|
2018-03-26T20:30:53.000Z
|
2020-01-18T13:52:32.000Z
|
from collections import defaultdict
from unittest.mock import MagicMock
import pytest
from autonomia.libs.redispersistence import TelegramRedisPersistence
@pytest.fixture(scope="function")
def redis_client_mock():
return MagicMock()
def test_init_redis_persistence(redis_client_mock):
    """A new persistence keeps its config and enables every data store."""
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    assert persistence.store_bot_data
    assert persistence.store_chat_data
    assert persistence.store_user_data
    assert persistence.key_prefix == "bot:"
    assert persistence.redis_client == redis_client_mock
def test_get_user_data_with_empty_storage(redis_client_mock):
    """With nothing stored in redis, user data is an empty defaultdict."""
    redis_client_mock.hgetall.return_value = None
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    user_data = persistence.get_user_data()
    assert isinstance(user_data, defaultdict)
    assert len(user_data) == 0
def test_get_user_data_with_data_in_storage(redis_client_mock):
    """Stored hash entries are decoded: bytes key -> int id, JSON -> dict."""
    redis_client_mock.hgetall.return_value = {b"1234567": b'{"saved_data": "sample"}'}
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    user_data = persistence.get_user_data()
    assert isinstance(user_data, defaultdict)
    assert user_data[1234567] == {"saved_data": "sample"}
def test_update_user_data(redis_client_mock):
    """Updating user data writes JSON under the prefixed user_data hash."""
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    persistence.update_user_data(1234567, {"saved_data": "sample"})
    redis_client_mock.hset.assert_called_once_with(
        "bot:user_data", 1234567, '{"saved_data": "sample"}'
    )
def test_get_chat_data_with_empty_storage(redis_client_mock):
    """With nothing stored in redis, chat data is an empty defaultdict."""
    redis_client_mock.hgetall.return_value = None
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    chat_data = persistence.get_chat_data()
    assert isinstance(chat_data, defaultdict)
    assert len(chat_data) == 0
def test_get_chat_data_with_data_in_storage(redis_client_mock):
    """Stored chat entries are decoded: bytes key -> int id, JSON -> dict."""
    redis_client_mock.hgetall.return_value = {b"99999": b'{"saved_data": "sample"}'}
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    chat_data = persistence.get_chat_data()
    assert isinstance(chat_data, defaultdict)
    assert chat_data[99999] == {"saved_data": "sample"}
def test_update_chat_data(redis_client_mock):
    """Updating chat data writes JSON under the prefixed chat_data hash."""
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    persistence.update_chat_data(99999, {"saved_data": "sample"})
    redis_client_mock.hset.assert_called_once_with(
        "bot:chat_data", 99999, '{"saved_data": "sample"}'
    )
def test_get_bot_data_with_empty_storage(redis_client_mock):
    """With no stored value, bot data is an empty defaultdict (plain GET)."""
    redis_client_mock.get.return_value = None
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    bot_data = persistence.get_bot_data()
    assert isinstance(bot_data, defaultdict)
    assert len(bot_data) == 0
def test_get_bot_data_with_data_in_storage(redis_client_mock):
    """A stored JSON blob is decoded into the bot-data mapping."""
    redis_client_mock.get.return_value = b'{"saved_data": "sample"}'
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    bot_data = persistence.get_bot_data()
    assert isinstance(bot_data, defaultdict)
    assert bot_data == {"saved_data": "sample"}
def test_update_bot_data(redis_client_mock):
    """Updating bot data issues a single SET with the JSON payload."""
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    persistence.update_bot_data({"saved_data": "sample"})
    redis_client_mock.set.assert_called_once_with(
        "bot:bot_data", '{"saved_data": "sample"}'
    )
def test_get_conversation_data_with_empty_storage(redis_client_mock):
    """With no stored conversations, the handler's mapping is empty."""
    redis_client_mock.hgetall.return_value = None
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    conversations = persistence.get_conversations("handler_name")
    assert isinstance(conversations, dict)
    assert len(conversations) == 0
def test_get_conversation_data_with_data_in_storage(redis_client_mock):
    """Stored keys like b"[99999]" are decoded back into tuple keys."""
    redis_client_mock.hgetall.return_value = {b"[99999]": b'{"saved_data": "sample"}'}
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    conversations = persistence.get_conversations("handler_name")
    assert isinstance(conversations, dict)
    assert conversations[(99999,)] == {"saved_data": "sample"}
def test_update_conversation_data(redis_client_mock):
    """Conversation state lands in a per-handler hash, key/state as strings."""
    persistence = TelegramRedisPersistence(
        redis_client=redis_client_mock, key_prefix="bot:"
    )
    persistence.update_conversation("handler_name", (99999,), 0)
    redis_client_mock.hset.assert_called_once_with(
        "bot:conversation_data:handler_name", "[99999]", "0"
    )
| 36.875
| 86
| 0.753898
| 598
| 4,425
| 5.170569
| 0.095318
| 0.192109
| 0.194049
| 0.099612
| 0.855433
| 0.830207
| 0.78978
| 0.751294
| 0.751294
| 0.713777
| 0
| 0.019316
| 0.134237
| 4,425
| 119
| 87
| 37.184874
| 0.787784
| 0
| 0
| 0.47191
| 0
| 0
| 0.108249
| 0.007684
| 0
| 0
| 0
| 0
| 0.280899
| 1
| 0.157303
| false
| 0
| 0.044944
| 0.011236
| 0.213483
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86799ce30b3eda5be6b1db4480a8fe18c2b3d7dc
| 75
|
py
|
Python
|
datasets/__init__.py
|
mingcv/Bread
|
20dedfe2105b08ce8499b216c3c2bfd3699af17f
|
[
"Apache-2.0"
] | 24
|
2021-12-01T02:13:54.000Z
|
2022-02-06T06:40:40.000Z
|
datasets/__init__.py
|
mingcv/Bread
|
20dedfe2105b08ce8499b216c3c2bfd3699af17f
|
[
"Apache-2.0"
] | null | null | null |
datasets/__init__.py
|
mingcv/Bread
|
20dedfe2105b08ce8499b216c3c2bfd3699af17f
|
[
"Apache-2.0"
] | 6
|
2021-12-01T02:14:11.000Z
|
2021-12-23T12:50:06.000Z
|
from .low_light import *
from .low_light_test import *
from .mef import *
| 15
| 29
| 0.746667
| 12
| 75
| 4.416667
| 0.5
| 0.264151
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 75
| 4
| 30
| 18.75
| 0.854839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
86b1e5bd319ddd3e1f593bb30c29436a752a9623
| 5,931
|
py
|
Python
|
shakespearelang/tests/unit/test_gotos.py
|
btc1311/shakespearelang
|
5cdbd5023252f2ada124d1b2d2390c9d7e79e395
|
[
"MIT"
] | null | null | null |
shakespearelang/tests/unit/test_gotos.py
|
btc1311/shakespearelang
|
5cdbd5023252f2ada124d1b2d2390c9d7e79e395
|
[
"MIT"
] | null | null | null |
shakespearelang/tests/unit/test_gotos.py
|
btc1311/shakespearelang
|
5cdbd5023252f2ada124d1b2d2390c9d7e79e395
|
[
"MIT"
] | null | null | null |
from shakespearelang.shakespeare_interpreter import Shakespeare
from io import StringIO
import pytest
SAMPLE_PLAY = """
Test.
Romeo, a test.
Juliet, a test.
Macbeth, a test.
Act I: Nothing to see here.
Scene I: These are not the actors you're looking for.
[Enter Romeo and Juliet]
Juliet: Are you as good as nothing?
Scene II: Still nothing.
Scene III: Nothing strikes back.
Act II: So separate.
Scene I: This is hard to get to.
Scene II: Likewise.
Scene III: Yep.
Scene IV: Still going.
"""
def test_goto_current(monkeypatch, capsys):
s = Shakespeare()
s.load_play(SAMPLE_PLAY)
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.run_sentence('Let us proceed to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
def test_goto_next(monkeypatch, capsys):
s = Shakespeare()
s.load_play(SAMPLE_PLAY)
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.run_sentence('Let us proceed to scene II.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
def test_goto_prev(monkeypatch, capsys):
s = Shakespeare()
s.load_play(SAMPLE_PLAY)
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
s.run_sentence('Let us return to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
def test_goto_without_opposite_character(monkeypatch, capsys):
s = Shakespeare()
s.load_play(SAMPLE_PLAY)
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.run_event('[Exit Romeo]')
s.run_sentence('Let us proceed to scene II.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
s.run_event('[Enter Romeo and Macbeth]')
s.run_sentence('Let us proceed to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
def test_goto_conditionals(monkeypatch, capsys):
s = Shakespeare()
s.load_play(SAMPLE_PLAY)
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.global_boolean = True
s.run_sentence('If so, let us proceed to scene II.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
s.global_boolean = True
s.run_sentence('If not, let us proceed to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
s.global_boolean = False
s.run_sentence('If so, let us proceed to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
s.global_boolean = False
s.run_sentence('If not, let us proceed to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
def test_goto_based_on_numeral_not_order(monkeypatch, capsys):
s = Shakespeare()
s.load_play("""
Test.
Romeo, a test.
Juliet, a test.
Act I: Nothing to see here.
Scene III: These are not the actors you're looking for.
[Enter Romeo and Juliet]
Juliet: Are you as good as nothing?
Scene I: Still nothing.
Scene II: Nothing strikes back.
""")
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.run_sentence('Let us return to scene I.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
s.run_sentence('Let us return to scene III.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.run_sentence('Let us return to scene II.', s._on_stage_character_by_name('Juliet'))
assert s.current_position == {'act': 0, 'scene': 2, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
def test_errors_on_goto_nonexistent(monkeypatch, capsys):
s = Shakespeare()
s.load_play(SAMPLE_PLAY)
assert s.current_position == {'act': 0, 'scene': 0, 'event': 0}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 0, 'event': 1}
s.step_forward()
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
with pytest.raises(Exception) as exc:
s.run_sentence('Let us proceed to scene IV.', s._on_stage_character_by_name('Juliet'))
assert 'does not exist' in str(exc.value).lower()
assert s.current_position == {'act': 0, 'scene': 1, 'event': 0}
captured = capsys.readouterr()
assert captured.out == ''
assert captured.err == ''
| 35.945455
| 97
| 0.643736
| 863
| 5,931
| 4.239861
| 0.122827
| 0.05548
| 0.110959
| 0.174365
| 0.869363
| 0.869363
| 0.869363
| 0.860071
| 0.821809
| 0.805411
| 0
| 0.018382
| 0.20199
| 5,931
| 164
| 98
| 36.164634
| 0.754701
| 0
| 0
| 0.725191
| 0
| 0
| 0.275164
| 0
| 0
| 0
| 0
| 0
| 0.335878
| 1
| 0.053435
| false
| 0
| 0.022901
| 0
| 0.076336
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
86ec3cfdf618a6e35978ecffdf65fec4565b4b46
| 76
|
py
|
Python
|
languages/python/topics/webscraping/P003_IdentifyWebsiteOwner.py
|
lakshmikanth-tesla/DeveloperNotes2Myself
|
9a5dad930ddbb99ace46d2d672109e8553aecbc2
|
[
"MIT"
] | 2
|
2019-05-25T10:09:00.000Z
|
2022-03-11T09:06:23.000Z
|
languages/python/topics/webscraping/P003_IdentifyWebsiteOwner.py
|
lakshmikanth-tesla/DeveloperNotes2Myself
|
9a5dad930ddbb99ace46d2d672109e8553aecbc2
|
[
"MIT"
] | 2
|
2020-03-31T04:30:17.000Z
|
2020-10-30T07:54:28.000Z
|
languages/python/topics/webscraping/P003_IdentifyWebsiteOwner.py
|
lakshmikanth-tesla/DeveloperNotes2Myself
|
9a5dad930ddbb99ace46d2d672109e8553aecbc2
|
[
"MIT"
] | 4
|
2019-07-12T13:18:56.000Z
|
2021-11-17T08:04:55.000Z
|
import whois
print whois.whois('google.com')
print whois.whois('apple.com')
| 19
| 31
| 0.763158
| 12
| 76
| 4.833333
| 0.5
| 0.344828
| 0.517241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 76
| 4
| 32
| 19
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.246753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
81019080592b882a69624a73bc59a00f293e8361
| 35,400
|
py
|
Python
|
airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py
|
jd-sanders/airbyte
|
fa999d26ee415e89672ae96bfab315c6b180ea9f
|
[
"MIT"
] | 1
|
2021-11-21T22:35:45.000Z
|
2021-11-21T22:35:45.000Z
|
airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py
|
jd-sanders/airbyte
|
fa999d26ee415e89672ae96bfab315c6b180ea9f
|
[
"MIT"
] | 2
|
2022-03-08T21:35:35.000Z
|
2022-03-08T21:35:39.000Z
|
airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py
|
Mashey/airbyte
|
415f55dee56a65994b9f91cc3e166238cebc7e3b
|
[
"MIT"
] | 1
|
2021-08-05T11:22:37.000Z
|
2021-08-05T11:22:37.000Z
|
#
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
from unittest.mock import MagicMock
import pytest
from airbyte_cdk.models import (
AirbyteMessage,
AirbyteRecordMessage,
AirbyteStream,
ConfiguredAirbyteCatalog,
ConfiguredAirbyteStream,
ConnectorSpecification,
Type,
)
from source_acceptance_test.config import BasicReadTestConfig
from source_acceptance_test.tests.test_core import TestBasicRead as _TestBasicRead
from source_acceptance_test.tests.test_core import TestDiscovery as _TestDiscovery
from source_acceptance_test.tests.test_core import TestSpec as _TestSpec
@pytest.mark.parametrize(
"schema, cursors, should_fail",
[
({}, ["created"], True),
({"properties": {"created": {"type": "string"}}}, ["created"], False),
({"properties": {"created_at": {"type": "string"}}}, ["created"], True),
({"properties": {"created": {"type": "string"}}}, ["updated", "created"], True),
({"properties": {"updated": {"type": "object", "properties": {"created": {"type": "string"}}}}}, ["updated", "created"], False),
({"properties": {"created": {"type": "object", "properties": {"updated": {"type": "string"}}}}}, ["updated", "created"], True),
],
)
def test_discovery(schema, cursors, should_fail):
t = _TestDiscovery()
discovered_catalog = {
"test_stream": AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema, "default_cursor_field": cursors})
}
if should_fail:
with pytest.raises(AssertionError):
t.test_defined_cursors_exist_in_schema(None, discovered_catalog)
else:
t.test_defined_cursors_exist_in_schema(None, discovered_catalog)
@pytest.mark.parametrize(
"schema, should_fail",
[
({}, False),
({"$ref": None}, True),
({"properties": {"user": {"$ref": None}}}, True),
({"properties": {"user": {"$ref": "user.json"}}}, True),
({"properties": {"user": {"type": "object", "properties": {"username": {"type": "string"}}}}}, False),
({"properties": {"fake_items": {"type": "array", "items": {"$ref": "fake_item.json"}}}}, True),
(
{
"properties": {
"fake_items": {
"oneOf": [{"type": "object", "$ref": "fake_items_1.json"}, {"type": "object", "$ref": "fake_items_2.json"}]
}
}
},
True,
),
],
)
def test_ref_in_discovery_schemas(schema, should_fail):
t = _TestDiscovery()
discovered_catalog = {"test_stream": AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema})}
if should_fail:
with pytest.raises(AssertionError):
t.test_defined_refs_exist_in_schema(None, discovered_catalog)
else:
t.test_defined_refs_exist_in_schema(None, discovered_catalog)
@pytest.mark.parametrize(
"schema, record, should_fail",
[
({"type": "object"}, {"aa": 23}, False),
({"type": "object"}, {}, False),
({"type": "object", "properties": {"created": {"type": "string"}}}, {"aa": 23}, True),
({"type": "object", "properties": {"created": {"type": "string"}}}, {"created": "23"}, False),
({"type": "object", "properties": {"created": {"type": "string"}}}, {"root": {"created": "23"}}, True),
# Recharge shop stream case
(
{"type": "object", "properties": {"shop": {"type": ["null", "object"]}, "store": {"type": ["null", "object"]}}},
{"shop": {"a": "23"}, "store": {"b": "23"}},
False,
),
],
)
def test_read(schema, record, should_fail):
catalog = ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema}),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
)
input_config = BasicReadTestConfig()
docker_runner_mock = MagicMock()
docker_runner_mock.call_read.return_value = [
AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=record, emitted_at=111))
]
t = _TestBasicRead()
if should_fail:
with pytest.raises(AssertionError, match="stream should have some fields mentioned by json schema"):
t.test_read(None, catalog, input_config, [], docker_runner_mock, MagicMock())
else:
t.test_read(None, catalog, input_config, [], docker_runner_mock, MagicMock())
@pytest.mark.parametrize(
"connector_spec, expected_error",
[
# SUCCESS: no authSpecification specified
(ConnectorSpecification(connectionSpecification={}), ""),
# FAIL: Field specified in root object does not exist
(
ConnectorSpecification(
connectionSpecification={"type": "object"},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 0],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"Specified oauth fields are missed from spec schema:",
),
# SUCCESS: Empty root object
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": [],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
# FAIL: Some oauth fields missed
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
},
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 0],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"Specified oauth fields are missed from spec schema:",
),
# SUCCESS: case w/o oneOf property
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
},
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials"],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
# SUCCESS: case w/ oneOf property
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"oneOf": [
{
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
}
},
{
"properties": {
"api_key": {"type": "string"},
}
},
],
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 0],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
# FAIL: Wrong root object index
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"oneOf": [
{
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
}
},
{
"properties": {
"api_key": {"type": "string"},
}
},
],
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 1],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"Specified oauth fields are missed from spec schema:",
),
# SUCCESS: root object index equal to 1
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"oneOf": [
{
"properties": {
"api_key": {"type": "string"},
}
},
{
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
}
},
],
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 1],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
],
)
def test_validate_oauth_flow(connector_spec, expected_error):
t = _TestSpec()
if expected_error:
with pytest.raises(AssertionError, match=expected_error):
t.test_oauth_flow_parameters(connector_spec)
else:
t.test_oauth_flow_parameters(connector_spec)
@pytest.mark.parametrize(
"records, configured_catalog, expected_error",
[
(
[AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111)],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{"name": "test1", "json_schema": {"type": "object", "properties": {"f1": {"type": "string"}}}}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
"",
),
(
[AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111)],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {"type": "object", "properties": {"f1": {"type": "string"}, "f2": {"type": "string"}}},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
r"`test1` stream has `\['/f2'\]`",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2"}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {"type": "object", "properties": {"f1": {"type": "string"}, "f2": {"type": "string"}}},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
"",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2", "f3": [1, 2, 3]}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {"type": "array", "items": {"type": "integer"}},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
"",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2", "f3": []}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {"type": "array", "items": {"type": "integer"}},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
r"`test1` stream has `\['/f3/\[\]'\]`",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2", "f3": {"f4": "v4", "f5": [1, 2]}}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {"type": "object", "properties": {"f4": {"type": "string"}, "f5": {"type": "array"}}},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
"",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2", "f3": {"f4": "v4"}}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {"type": "object", "properties": {"f4": {"type": "string"}, "f5": {"type": "array"}}},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
r"`test1` stream has `\['/f3/f5/\[\]'\]`",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2", "f3": {"f4": "v4"}}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {
"type": "object",
"properties": {
"f4": {"type": "string"},
"f5": {
"type": "array",
"items": {
"type": "object",
"properties": {
"f6": {"type": "string"},
"f7": {"type": "array"},
},
},
},
},
},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
),
r"`test1` stream has `\['/f3/f5/\[\]/f6', '/f3/f5/\[\]/f7/\[\]'\]`",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(
stream="test1", data={"f2": "v2", "f3": {"f4": "v4", "f5": [{"f6": "v6", "f7": ["a", "b"]}]}}, emitted_at=111
),
AirbyteRecordMessage(stream="test2", data={"f8": "v8"}, emitted_at=111),
AirbyteRecordMessage(stream="test2", data={"f9": "v9"}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {
"type": "object",
"properties": {
"f4": {"type": "string"},
"f5": {
"type": "array",
"items": {
"type": "object",
"properties": {
"f6": {"type": "string"},
"f7": {"type": "array"},
},
},
},
},
},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test2",
"json_schema": {"type": "object", "properties": {"f8": {"type": "string"}, "f9": {"type": "string"}}},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
]
),
"",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f2": "v2", "f3": {"f4": "v4", "f5": [{"f6": "v6", "f7": []}]}}, emitted_at=111),
AirbyteRecordMessage(stream="test2", data={}, emitted_at=111),
AirbyteRecordMessage(stream="test2", data={"f9": "v9"}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {
"type": "object",
"properties": {
"f4": {"type": "string"},
"f5": {
"type": "array",
"items": {
"type": "object",
"properties": {
"f6": {"type": "string"},
"f7": {"type": "array"},
},
},
},
},
},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test2",
"json_schema": {"type": "object", "properties": {"f8": {"type": "string"}, "f9": {"type": "string"}}},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
]
),
r"(`test1` stream has `\['/f3/f5/\[\]/f7/\[\]']`)|(`test2` `\['/f8'\]`)",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1", "f2": "v2"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f3": {"f4": "v4"}}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {
"oneOf": [
{"type": "object", "properties": {"f4": {"type": "string"}}},
{"type": "object", "properties": {"f5": {"type": "array"}}},
]
},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
]
),
"",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1", "f2": "v2"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f3": {"f5": {"f7": "v7"}}}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {
"oneOf": [
{"type": "object", "properties": {"f4": {"type": "string"}}},
{
"type": "object",
"properties": {
"f5": {
"oneOf": [
{"type": "object", "properties": {"f6": {"type": "string"}}},
{"type": "object", "properties": {"f7": {"type": "string"}}},
]
}
},
},
]
},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
]
),
"",
),
(
[
AirbyteRecordMessage(stream="test1", data={"f1": "v1", "f2": "v2"}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={}, emitted_at=111),
AirbyteRecordMessage(stream="test1", data={"f3": {"f5": {}}}, emitted_at=111),
],
ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj(
{
"name": "test1",
"json_schema": {
"type": "object",
"properties": {
"f1": {"type": "string"},
"f2": {"type": "string"},
"f3": {
"oneOf": [
{"type": "object", "properties": {"f4": {"type": "string"}}},
{
"type": "object",
"properties": {
"f5": {
"anyOf": [
{"type": "object", "properties": {"f6": {"type": "string"}}},
{"type": "object", "properties": {"f7": {"type": "string"}}},
]
}
},
},
]
},
},
},
}
),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
),
]
),
r"`test1` stream has `\['/f3\(0\)/f4', '/f3\(1\)/f5\(0\)/f6', '/f3\(1\)/f5\(1\)/f7'\]`",
),
],
)
def test_validate_field_appears_at_least_once(records, configured_catalog, expected_error):
t = _TestBasicRead()
if expected_error:
with pytest.raises(AssertionError, match=expected_error):
t._validate_field_appears_at_least_once(records=records, configured_catalog=configured_catalog)
else:
t._validate_field_appears_at_least_once(records=records, configured_catalog=configured_catalog)
| 44.753477
| 140
| 0.328192
| 1,850
| 35,400
| 6.101622
| 0.099459
| 0.069986
| 0.085046
| 0.083717
| 0.84798
| 0.828225
| 0.80776
| 0.778437
| 0.758062
| 0.758062
| 0
| 0.023582
| 0.544802
| 35,400
| 790
| 141
| 44.810127
| 0.676927
| 0.010254
| 0
| 0.625326
| 0
| 0.001305
| 0.1666
| 0.01262
| 0
| 0
| 0
| 0
| 0.006527
| 1
| 0.006527
| false
| 0
| 0.009138
| 0
| 0.015666
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
811356f07ab15bd8c6b602914a20a9c360fd0f41
| 93
|
py
|
Python
|
miniast/util.py
|
cpcloud/miniast
|
8677752f70a1bbfc9dee3c3a742ea99790b9659e
|
[
"Apache-2.0"
] | 16
|
2018-02-24T00:07:44.000Z
|
2018-09-13T21:39:25.000Z
|
miniast/util.py
|
cpcloud/miniast
|
8677752f70a1bbfc9dee3c3a742ea99790b9659e
|
[
"Apache-2.0"
] | null | null | null |
miniast/util.py
|
cpcloud/miniast
|
8677752f70a1bbfc9dee3c3a742ea99790b9659e
|
[
"Apache-2.0"
] | null | null | null |
import textwrap
def indent(text, spaces=' ' * 4):
return textwrap.indent(text, spaces)
| 15.5
| 40
| 0.688172
| 12
| 93
| 5.333333
| 0.666667
| 0.3125
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013158
| 0.182796
| 93
| 5
| 41
| 18.6
| 0.828947
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
81394cb5cd6664c9334aa59ca9214360bb5120af
| 12,602
|
py
|
Python
|
tests/test_runner.py
|
rahultesla/spectacles
|
dae1e938805fcdae7dea422801916322c0da5608
|
[
"MIT"
] | null | null | null |
tests/test_runner.py
|
rahultesla/spectacles
|
dae1e938805fcdae7dea422801916322c0da5608
|
[
"MIT"
] | null | null | null |
tests/test_runner.py
|
rahultesla/spectacles
|
dae1e938805fcdae7dea422801916322c0da5608
|
[
"MIT"
] | null | null | null |
import string
import jsonschema
import pytest
from unittest.mock import Mock, patch
from spectacles.client import LookerClient
from spectacles.exceptions import ContentError, DataTestError, SqlError
from spectacles.runner import Runner
from utils import build_validation
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@pytest.mark.parametrize("fail_fast", [True, False])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_validate_sql_should_work(mock_time_hash, looker_client, fail_fast):
runner = Runner(looker_client, "eye_exam")
result = runner.validate_sql(
ref="pytest",
filters=["eye_exam/users", "eye_exam/users__fail"],
fail_fast=fail_fast,
)
assert result["status"] == "failed"
assert result["tested"][0]["status"] == "passed"
assert result["tested"][1]["status"] == "failed"
if fail_fast:
assert len(result["errors"]) == 1
else:
assert len(result["errors"]) > 1
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_validate_content_should_work(mock_time_hash, looker_client):
runner = Runner(looker_client, "eye_exam")
result = runner.validate_content(filters=["eye_exam/users", "eye_exam/users__fail"])
assert result["status"] == "failed"
assert result["tested"][0]["status"] == "passed"
assert result["tested"][1]["status"] == "failed"
assert len(result["errors"]) > 0
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_validate_data_tests_should_work(mock_time_hash, looker_client):
runner = Runner(looker_client, "eye_exam")
result = runner.validate_data_tests(
filters=["eye_exam/users", "eye_exam/users__fail"]
)
assert result["status"] == "failed"
assert result["tested"][0]["status"] == "passed"
assert result["tested"][1]["status"] == "failed"
assert len(result["errors"]) > 0
@patch("spectacles.validators.data_test.DataTestValidator.get_tests")
@patch("spectacles.validators.data_test.DataTestValidator.validate")
@patch("spectacles.runner.build_project")
@patch("spectacles.runner.LookerBranchManager")
def test_validate_data_tests_returns_valid_schema(
mock_branch_manager,
mock_build_project,
mock_validate,
mock_get_tests,
project,
model,
explore,
schema,
):
error_message = "An error ocurred"
def add_error_to_project(tests):
project.models[0].explores[0].queried = True
project.models[0].explores[0].errors = [
DataTestError("", "", error_message, "", "", "")
]
model.explores = [explore]
project.models = [model]
mock_build_project.return_value = project
mock_validate.side_effect = add_error_to_project
runner = Runner(client=Mock(spec=LookerClient), project="eye_exam")
result = runner.validate_data_tests()
assert result["status"] == "failed"
assert result["errors"][0]["message"] == error_message
jsonschema.validate(result, schema)
@patch("spectacles.validators.content.ContentValidator.validate")
@patch("spectacles.runner.build_project")
@patch("spectacles.runner.LookerBranchManager")
def test_validate_content_returns_valid_schema(
mock_branch_manager,
mock_build_project,
mock_validate,
project,
model,
explore,
schema,
):
error_message = "An error ocurred"
def add_error_to_project(tests):
project.models[0].explores[0].queried = True
project.models[0].explores[0].errors = [
ContentError("", "", error_message, "", "", "", "", "")
]
model.explores = [explore]
project.models = [model]
mock_build_project.return_value = project
mock_validate.side_effect = add_error_to_project
runner = Runner(client=Mock(spec=LookerClient), project="eye_exam")
result = runner.validate_content()
assert result["status"] == "failed"
assert result["errors"][0]["message"] == error_message
jsonschema.validate(result, schema)
@patch("spectacles.validators.sql.SqlValidator.create_tests")
@patch("spectacles.validators.sql.SqlValidator.run_tests")
@patch("spectacles.runner.build_project")
@patch("spectacles.runner.LookerBranchManager")
def test_validate_sql_returns_valid_schema(
    mock_branch_manager,
    mock_build_project,
    mock_run_tests,
    mock_create_tests,
    project,
    model,
    explore,
    schema,
):
    """A failing SQL validation should produce a schema-conformant result."""
    error_message = "An error ocurred"

    def _attach_sql_error(tests, profile):
        # Simulate run_tests flagging the first explore with one SQL error.
        first = project.models[0].explores[0]
        first.queried = True
        first.errors = [SqlError("", "", "", "", error_message)]

    model.explores = [explore]
    project.models = [model]
    mock_build_project.return_value = project
    mock_run_tests.side_effect = _attach_sql_error

    runner = Runner(client=Mock(spec=LookerClient), project="eye_exam")
    result = runner.validate_sql()

    assert result["status"] == "failed"
    assert result["errors"][0]["message"] == error_message
    # The result payload must conform to the published JSON schema.
    jsonschema.validate(result, schema)
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_incremental_sql_with_equal_explores_should_not_error(
    mock_time_hash,
    looker_client,
):
    """Every explore compiles to the same SQL as the target.

    All explores should therefore be skipped and no errors reported.
    """
    runner = Runner(looker_client, "eye_exam")
    result = runner.validate_sql(
        incremental=True,
        ref="pytest-incremental-equal",
        filters=["eye_exam/users", "eye_exam/users__fail"],
        fail_fast=False,
    )
    assert result["status"] == "passed"
    assert not result["errors"]
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_incremental_sql_with_diff_explores_and_valid_sql_should_not_error(
    mock_time_hash, looker_client
):
    """One explore differs in SQL from the target and its SQL is valid.

    Only the differing explore should be tested, yielding no errors.
    """
    runner = Runner(looker_client, "eye_exam")
    result = runner.validate_sql(
        incremental=True,
        ref="pytest-incremental-valid-diff",
        filters=["eye_exam/users", "eye_exam/users__fail"],
        fail_fast=False,
    )
    assert result["status"] == "passed"
    # users changed and passed; users__fail was identical and got skipped.
    for index, (explore_name, status) in enumerate(
        [("users", "passed"), ("users__fail", "skipped")]
    ):
        assert result["tested"][index]["explore"] == explore_name
        assert result["tested"][index]["status"] == status
    assert not result["errors"]
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_incremental_sql_with_diff_explores_and_invalid_sql_should_error(
    mock_time_hash, looker_client
):
    """One explore differs in SQL from the target and carries one SQL error.

    Only the differing explore should be tested, yielding exactly one error.
    """
    runner = Runner(looker_client, "eye_exam")
    result = runner.validate_sql(
        incremental=True,
        ref="pytest-incremental-invalid-diff",
        filters=["eye_exam/users", "eye_exam/users__fail"],
        fail_fast=False,
    )
    assert result["status"] == "failed"
    # users changed and failed; users__fail was identical and got skipped.
    for index, (explore_name, status) in enumerate(
        [("users", "failed"), ("users__fail", "skipped")]
    ):
        assert result["tested"][index]["explore"] == explore_name
        assert result["tested"][index]["status"] == status
    assert len(result["errors"]) == 1
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_incremental_sql_with_diff_explores_and_invalid_diff_sql_should_error(
    mock_time_hash, looker_client
):
    """One explore differs and has two SQL errors, only one of which also
    exists on the target branch.

    The differing explore should be tested and only the new error reported.
    """
    runner = Runner(looker_client, "eye_exam")
    result = runner.validate_sql(
        incremental=True,
        ref="pytest-incremental-invalid-equal",
        filters=["eye_exam/users", "eye_exam/users__fail"],
        fail_fast=False,
    )
    assert result["status"] == "failed"
    # users was identical and skipped; users__fail changed and failed.
    for index, (explore_name, status) in enumerate(
        [("users", "skipped"), ("users__fail", "failed")]
    ):
        assert result["tested"][index]["explore"] == explore_name
        assert result["tested"][index]["status"] == status
    assert len(result["errors"]) == 1
@pytest.mark.vcr(match_on=["uri", "method", "raw_body"])
@patch("spectacles.runner.time_hash", side_effect=tuple(string.ascii_lowercase))
def test_incremental_sql_with_diff_explores_and_invalid_existing_sql_should_error(
    mock_time_hash, looker_client
):
    """The target branch has many errors, one of which is fixed on the base.

    The differing explore is tested but no errors should be reported: every
    remaining error already exists on the target.
    """
    runner = Runner(looker_client, "eye_exam")
    result = runner.validate_sql(
        incremental=True,
        target="pytest-incremental-dirty-prod",
        ref="pytest-incremental-fix-prod",
        filters=["eye_exam/users", "eye_exam/users__fail"],
        fail_fast=False,
    )
    assert result["status"] == "passed"
    # users was identical and skipped; users__fail changed and now passes.
    for index, (explore_name, status) in enumerate(
        [("users", "skipped"), ("users__fail", "passed")]
    ):
        assert result["tested"][index]["explore"] == explore_name
        assert result["tested"][index]["status"] == status
    assert not result["errors"]
def test_incremental_same_results_should_not_have_errors():
    """Identical base and target validations should diff to a clean pass."""
    base = build_validation("content")
    target = build_validation("content")
    diff = Runner._incremental_results(base, target)
    assert diff["status"] == "passed"
    assert diff["errors"] == []
    assert diff["tested"] == [
        {"model": "ecommerce", "explore": explore, "status": "passed"}
        for explore in ("orders", "sessions", "users")
    ]
def test_incremental_with_fewer_errors_than_target():
    """Errors fixed on the base branch must not appear in the diff."""
    base = build_validation("content")
    target = build_validation("content")
    # Pretend the base branch fixed every error on the last explore.
    base["tested"][2]["status"] = "passed"
    base["errors"] = []
    diff = Runner._incremental_results(base, target)
    assert diff["status"] == "passed"
    assert diff["errors"] == []
    assert diff["tested"] == [
        {"model": "ecommerce", "explore": explore, "status": "passed"}
        for explore in ("orders", "sessions", "users")
    ]
def test_incremental_with_more_errors_than_target():
    """Errors new to the base branch must all surface in the diff."""
    base = build_validation("content")
    target = build_validation("content")
    base["tested"][1]["status"] = "failed"
    # Two errors that exist only on the base branch.
    extra_errors = [
        {
            "model": "ecommerce",
            "explore": "users",
            "test": None,
            "message": "Another error occurred",
            "metadata": {},
        },
        {
            "model": "ecommerce",
            "explore": "sessions",
            "test": None,
            "message": "An error occurred",
            "metadata": {},
        },
    ]
    base["errors"].extend(extra_errors)
    diff = Runner._incremental_results(base, target)
    assert diff["status"] == "failed"
    assert diff["errors"] == extra_errors
    assert diff["tested"] == [
        {"model": "ecommerce", "explore": "orders", "status": "passed"},
        {"model": "ecommerce", "explore": "sessions", "status": "failed"},
        {"model": "ecommerce", "explore": "users", "status": "failed"},
    ]
def test_incremental_with_fewer_tested_explores_than_target():
    """Explores absent from the base run are dropped from the diff."""
    base = build_validation("content")
    target = build_validation("content")
    base["tested"].pop(0)  # the base run never tested the first explore
    new_error = {
        "model": "ecommerce",
        "explore": "users",
        "test": None,
        "message": "Another error occurred",
        "metadata": {},
    }
    base["errors"].append(new_error)
    diff = Runner._incremental_results(base, target)
    assert diff["status"] == "failed"
    assert diff["errors"] == [new_error]
    assert diff["tested"] == [
        {"model": "ecommerce", "explore": "sessions", "status": "passed"},
        {"model": "ecommerce", "explore": "users", "status": "failed"},
    ]
| 36.212644
| 88
| 0.677829
| 1,518
| 12,602
| 5.398551
| 0.102108
| 0.052715
| 0.048322
| 0.042709
| 0.885662
| 0.855278
| 0.841001
| 0.823429
| 0.81684
| 0.812447
| 0
| 0.004721
| 0.176401
| 12,602
| 347
| 89
| 36.317003
| 0.784854
| 0.061657
| 0
| 0.701389
| 0
| 0
| 0.223471
| 0.073497
| 0
| 0
| 0
| 0
| 0.197917
| 1
| 0.0625
| false
| 0.065972
| 0.027778
| 0
| 0.090278
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d4b4138d1d15f22309edfcc33b8140747e063307
| 20,630
|
py
|
Python
|
pmakeup/plugins/operating_system/OperatingSystemPMakeupPlugin.py
|
Koldar/pmake
|
85aca559b23cabc77dd657c5b645e26472b9e900
|
[
"Apache-2.0"
] | null | null | null |
pmakeup/plugins/operating_system/OperatingSystemPMakeupPlugin.py
|
Koldar/pmake
|
85aca559b23cabc77dd657c5b645e26472b9e900
|
[
"Apache-2.0"
] | null | null | null |
pmakeup/plugins/operating_system/OperatingSystemPMakeupPlugin.py
|
Koldar/pmake
|
85aca559b23cabc77dd657c5b645e26472b9e900
|
[
"Apache-2.0"
] | null | null | null |
import os
from typing import Union, List, Dict, Tuple, Any, Iterable
import pmakeup as pm
class OperatingSystemPMakeupPlugin(pm.AbstractPmakeupPlugin):
    """Pmakeup plugin exposing operating-system level commands.

    Thin wrappers around ``self.platform`` primitives: locating programs on
    the PATH, querying the logged-in user, and firing shell commands in the
    normal or admin flavor, each available as fire-and-forget, wait,
    show-stdout or capture-stdout.
    """

    def _setup_plugin(self):
        # No setup needed for this plugin.
        pass

    def _teardown_plugin(self):
        # No teardown needed for this plugin.
        pass

    def _get_dependencies(self) -> Iterable[type]:
        # This plugin does not depend on any other plugin.
        return []

    @pm.register_command.add("operating system")
    def is_program_installed(self, program_name: str) -> bool:
        """
        Check if a program is reachable via the command line.

        We will look **only** in the PATH environment variable.

        :param program_name: the name of the program (e.g., dot)
        :return: true if there is a program accessible to the PATH with the given name, false otherwise
        """
        self._log_command(f"""Checking if the executable \"{program_name}\" is in PATH""")
        return self.platform.is_program_installed(program_name)

    @pm.register_command.add("operating system")
    def get_program_path(self) -> Iterable[pm.path]:
        """
        List the paths in the PATH environment variable.

        :return: collection of paths
        """
        return self.platform.get_program_path()

    @pm.register_command.add("operating system")
    def current_user(self) -> str:
        """
        Get the user currently logged in.

        :return: the user currently logged in
        """
        return self.platform.get_current_username()

    @pm.register_command.add("operating system")
    def execute_and_run_in_background(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None, env: Dict[str, str] = None) -> int:
        """
        Execute a command in the background; no stdout will be printed on the console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :return: pid of the running process
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            # Normalize a caller-supplied cwd to an absolute path.
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            # A lone command string is treated as a one-element command list.
            commands = [commands]
        result = self.platform.fire_command_and_forget(
            commands=commands,
            cwd=cwd,
            env=env,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_and_forget(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None,
                           env: Dict[str, str] = None, check_exit_code: bool = True, timeout: int = None) -> int:
        """
        Execute a command and wait for it; no stdout will be printed on the console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: the command's exit code.
            NOTE(review): the original docstring claimed a triple, but the ``-> int``
            annotation and the non-unpacked assignment below suggest a single value —
            confirm against ``platform.fire_command_and_wait``.
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        result = self.platform.fire_command_and_wait(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_stdout_on_screen(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None,
                                 env: Dict[str, Any] = None, check_exit_code: bool = True, timeout: int = None) -> int:
        """
        Execute a command. We won't capture the stdout but we will show it on the pmakeup console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: the command's exit code.
            NOTE(review): the original docstring claimed a triple, but the ``-> int``
            annotation and the non-unpacked assignment below suggest a single value —
            confirm against ``platform.fire_command_and_show_stdout``.
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        result = self.platform.fire_command_and_show_stdout(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_return_stdout(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None,
                              env: Dict[str, Any] = None,
                              check_exit_code: bool = True, timeout: int = None) -> Tuple[int, str, str]:
        """
        Execute a command. We won't show the stdout on the pmakeup console; we capture and return it instead.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: triple. The first element is the exit code, the second is the captured stdout,
            the third is the captured stderr
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        exit_code, stdout, stderr = self.platform.fire_command_and_capture_stdout(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            log_entry=True
        )
        return exit_code, stdout, stderr

    @pm.register_command.add("operating system")
    def execute_admin_and_run_in_background(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None, env: Dict[str, Any] = None) -> int:
        """
        Execute a command as admin in the background; no stdout will be printed on the console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :return: pid of the running process
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        # credential=None: no password is supplied here — presumably the platform
        # elevates interactively (e.g., sudo prompt). TODO confirm.
        result = self.platform.fire_admin_command_and_forget(
            commands=commands,
            cwd=cwd,
            env=env,
            credential_type="password",
            credential=None,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_admin_and_forget(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None,
                                 env: Dict[str, Any] = None,
                                 check_exit_code: bool = True, timeout: int = None) -> int:
        """
        Execute a command as admin and wait for it; no stdout will be printed on the console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: the command's exit code.
            NOTE(review): ``execute_admin_with_password_fire_and_forget`` unpacks a triple
            from the same platform call that is assigned whole here — one of the two call
            sites must be wrong. Confirm ``fire_admin_command_and_wait``'s return shape.
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        result = self.platform.fire_admin_command_and_wait(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            credential_type="password",
            credential=None,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_admin_stdout_on_screen(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None,
                                       env: Dict[str, Any] = None,
                                       check_exit_code: bool = True, timeout: int = None) -> int:
        """
        Execute a command as an admin. We won't capture the stdout but we will show it on the pmakeup console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: the command's exit code.
            NOTE(review): the original docstring claimed a triple, but the ``-> int``
            annotation and the non-unpacked assignment below suggest a single value.
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        result = self.platform.fire_admin_command_and_show_stdout(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            credential_type="password",
            credential=None,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_admin_return_stdout(self, commands: Union[str, List[Union[str, List[str]]]], cwd: pm.path = None,
                                    env: Dict[str, Any] = None,
                                    check_exit_code: bool = True, timeout: int = None) -> Tuple[int, str, str]:
        """
        Execute a command as an admin. We won't show the stdout on the pmakeup console; we capture and return it instead.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: triple. The first element is the exit code, the second is the captured stdout,
            the third is the captured stderr
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        exit_code, stdout, stderr = self.platform.fire_admin_command_and_capture_stdout(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            credential_type="password",
            credential=None,
            log_entry=True
        )
        return exit_code, stdout, stderr

    @pm.register_command.add("operating system")
    def execute_admin_with_password_and_run_in_background(self, commands: Union[str, List[Union[str, List[str]]]], password: str, cwd: pm.path = None,
                                                          env: Dict[str, Any] = None) -> int:
        """
        Execute a command as admin in the background; no stdout will be printed on the console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param password: password of the user to invoke the program as an admin.
            **UNSAFE**: see the warning on ``execute_admin_with_password_fire_and_forget``.
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :return: pid of the running process (per the ``-> int`` annotation — but see the note below)
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        # NOTE(review): this unpacks a triple from fire_admin_command_and_forget, while
        # execute_admin_and_run_in_background assigns the same call's result directly
        # (its docstring says "pid"). Both cannot be right — confirm the platform
        # method's return shape; this line likely raises if a single pid is returned.
        result, _, _ = self.platform.fire_admin_command_and_forget(
            commands=commands,
            cwd=cwd,
            env=env,
            credential_type="password",
            credential=password,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_admin_with_password_fire_and_forget(self, commands: Union[str, List[Union[str, List[str]]]],
                                                    password: str,
                                                    cwd: pm.path = None, env: Dict[str, Any] = None,
                                                    check_exit_code: bool = True, timeout: int = None) -> int:
        """
        Execute a command as admin by providing the admin password. **THIS IS INCREDIBLY UNSAFE!!!**

        Please, I beg you, do **NOT** use this if you need any level of security! This will make the
        password visible in top, in the history, everywhere on your system. Please use it only if you
        need to execute a command on your local machine.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :param password: **[UNSAFE!!!!]** If you **really** need, you might want to run a command as an admin
            only on your laptop, and you want a really quick and dirty way to execute it, like as in the shell.
            Do **not** use this in production code, since the password will be printed in clear basically everywhere
            (e.g., history, system monitor, probably in a file as well)
        :return: the command's exit code (but see the note below)
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        # NOTE(review): this unpacks a triple from fire_admin_command_and_wait, while
        # execute_admin_and_forget assigns the same call's result directly. Both cannot
        # be right — confirm the platform method's return shape.
        result, _, _ = self.platform.fire_admin_command_and_wait(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            credential_type="password",
            credential=password,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_admin_with_password_stdout_on_screen(self, commands: Union[str, List[Union[str, List[str]]]],
                                                     password: str, cwd: pm.path = None, env: Dict[str, Any] = None,
                                                     check_exit_code: bool = True, timeout: int = None) -> int:
        """
        Execute a command as an admin. We won't capture the stdout but we will show it on the pmakeup console.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param password: **[UNSAFE!!!!]** If you **really** need, you might want to run a command as an admin
            only on your laptop, and you want a really quick and dirty way to execute it, like as in the shell.
            Do **not** use this in production code, since the password will be printed in clear basically everywhere
            (e.g., history, system monitor, probably in a file as well)
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: the command's exit code.
            NOTE(review): the original docstring claimed a triple, but the ``-> int``
            annotation and the non-unpacked assignment below suggest a single value.
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        result = self.platform.fire_admin_command_and_show_stdout(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            credential_type="password",
            credential=password,
            log_entry=True,
        )
        return result

    @pm.register_command.add("operating system")
    def execute_admin_with_password_return_stdout(self, commands: Union[str, List[Union[str, List[str]]]],
                                                  password: str, cwd: pm.path = None, env: Dict[str, Any] = None,
                                                  check_exit_code: bool = True,
                                                  timeout: int = None) -> Tuple[int, str, str]:
        """
        Execute a command as an admin. We won't show the stdout on the pmakeup console; we capture and return it instead.

        :param commands: the command(s) to execute. They will be executed in the same context
        :param password: **[UNSAFE!!!!]** If you **really** need, you might want to run a command as an admin
            only on your laptop, and you want a really quick and dirty way to execute it, like as in the shell.
            Do **not** use this in production code, since the password will be printed in clear basically everywhere
            (e.g., history, system monitor, probably in a file as well)
        :param cwd: current working directory where the command is executed (defaults to pmakeup's cwd)
        :param env: a dictionary representing the key-values of the environment variables
        :param check_exit_code: if true, we will generate an exception if the exit code is different than 0
        :param timeout: if positive, we will give up waiting for the command after the amount of seconds
        :return: triple. The first element is the exit code, the second is the captured stdout,
            the third is the captured stderr
        """
        if cwd is None:
            cwd = self.paths.cwd()
        else:
            cwd = self.paths.abs_path(cwd)
        if isinstance(commands, str):
            commands = [commands]
        exit_code, stdout, stderr = self.platform.fire_admin_command_and_capture_stdout(
            commands=commands,
            cwd=cwd,
            env=env,
            check_exit_code=check_exit_code,
            timeout=timeout,
            credential_type="password",
            credential=password,
            log_entry=True
        )
        return exit_code, stdout, stderr
# Register this plugin with pmakeup (autoregister is presumably provided by
# AbstractPmakeupPlugin — confirm in the base class).
OperatingSystemPMakeupPlugin.autoregister()
| 45.641593
| 157
| 0.615366
| 2,679
| 20,630
| 4.644644
| 0.074655
| 0.03279
| 0.037612
| 0.02411
| 0.911597
| 0.907096
| 0.906453
| 0.897292
| 0.895684
| 0.88596
| 0
| 0.000632
| 0.310228
| 20,630
| 451
| 158
| 45.742794
| 0.873788
| 0.42191
| 0
| 0.783465
| 0
| 0
| 0.033149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070866
| false
| 0.082677
| 0.011811
| 0.003937
| 0.149606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d4cb3e27d0aae70a21e2a2d4430c5c175cd0cc19
| 64,906
|
py
|
Python
|
tensorflow/python/estimator/canned/boosted_trees_test.py
|
hsm207/tensorflow
|
8ab4678ba216c3ec8fa32f417cb667b056689939
|
[
"Apache-2.0"
] | 1
|
2018-11-18T01:52:29.000Z
|
2018-11-18T01:52:29.000Z
|
tensorflow/python/estimator/canned/boosted_trees_test.py
|
hsm207/tensorflow
|
8ab4678ba216c3ec8fa32f417cb667b056689939
|
[
"Apache-2.0"
] | 1
|
2018-09-27T06:03:32.000Z
|
2018-09-27T06:03:32.000Z
|
tensorflow/python/estimator/canned/boosted_trees_test.py
|
hsm207/tensorflow
|
8ab4678ba216c3ec8fa32f417cb667b056689939
|
[
"Apache-2.0"
] | 6
|
2018-12-20T01:35:20.000Z
|
2020-07-10T17:29:57.000Z
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests boosted_trees estimators and model_fn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.kernels.boosted_trees import boosted_trees_pb2
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.estimator import model_fn
from tensorflow.python.estimator import run_config
from tensorflow.python.estimator.canned import boosted_trees
from tensorflow.python.estimator.inputs import numpy_io
from tensorflow.python.feature_column import feature_column
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gen_boosted_trees_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import session_run_hook
# Shared fixtures for the boosted-trees tests below: a 3-feature, 5-example
# synthetic dataset with both classification and regression labels.
NUM_FEATURES = 3

BUCKET_BOUNDARIES = [-2., .5, 12.]  # Boundaries for all the features.
INPUT_FEATURES = np.array(
    [
        [12.5, 1.0, -2.001, -2.0001, -1.999],  # feature_0 quantized:[3,2,0,0,1]
        [2.0, -3.0, 0.5, 0.0, 0.4995],  # feature_1 quantized:[2,0,2,1,1]
        [3.0, 20.0, 50.0, -100.0, 102.75],  # feature_2 quantized:[2,3,3,0,3]
    ],
    dtype=np.float32)

CLASSIFICATION_LABELS = [[0.], [1.], [1.], [0.], [0.]]
REGRESSION_LABELS = [[1.5], [0.3], [0.2], [2.], [5.]]
# One entry per feature row: {'f_0': ..., 'f_1': ..., 'f_2': ...}.
FEATURES_DICT = {'f_%d' % i: INPUT_FEATURES[i] for i in range(NUM_FEATURES)}

# EXAMPLE_ID is not exposed to Estimator yet, but supported at model_fn level.
EXAMPLE_IDS = np.array([0, 1, 2, 3, 4], dtype=np.int64)
EXAMPLE_ID_COLUMN = '__example_id__'
def _make_train_input_fn(is_classification):
  """Makes a train input_fn serving the in-memory fixture data.

  Args:
    is_classification: if True the input_fn pairs the features with
      CLASSIFICATION_LABELS, otherwise with REGRESSION_LABELS.
  """

  def _input_fn():
    # Copy the shared dict so adding the example-id entry doesn't mutate it.
    features = dict(FEATURES_DICT)
    features[EXAMPLE_ID_COLUMN] = constant_op.constant(EXAMPLE_IDS)
    if is_classification:
      chosen_labels = CLASSIFICATION_LABELS
    else:
      chosen_labels = REGRESSION_LABELS
    return features, chosen_labels

  return _input_fn
def _make_train_input_fn_dataset(is_classification, batch=None, repeat=None):
  """Makes a train input_fn backed by a tf.data Dataset.

  Args:
    is_classification: selects classification vs regression labels.
    batch: if truthy, slice the examples and batch them to this size;
      otherwise emit the whole dataset as a single tensor element.
    repeat: forwarded to Dataset.repeat; None repeats indefinitely.
  """

  def _input_fn():
    # Copy the shared dict so adding the example-id entry doesn't mutate it.
    features = dict(FEATURES_DICT)
    features[EXAMPLE_ID_COLUMN] = constant_op.constant(EXAMPLE_IDS)
    if is_classification:
      labels = CLASSIFICATION_LABELS
    else:
      labels = REGRESSION_LABELS
    if batch:
      feature_ds = dataset_ops.Dataset.from_tensor_slices(features)
      label_ds = dataset_ops.Dataset.from_tensor_slices(labels)
      ds = dataset_ops.Dataset.zip((feature_ds, label_ds)).batch(batch)
    else:
      ds = dataset_ops.Dataset.zip(
          (dataset_ops.Dataset.from_tensors(features),
           dataset_ops.Dataset.from_tensors(labels)))
    # Repeat indefinitely by default, or stop at the given count.
    return ds.repeat(repeat)

  return _input_fn
class BoostedTreesEstimatorTest(test_util.TensorFlowTestCase):
def setUp(self):
self._feature_columns = {
feature_column.bucketized_column(
feature_column.numeric_column('f_%d' % i, dtype=dtypes.float32),
BUCKET_BOUNDARIES)
for i in range(NUM_FEATURES)
}
def _assert_checkpoint(self, model_dir, global_step, finalized_trees,
attempted_layers):
self._assert_checkpoint_and_return_model(model_dir, global_step,
finalized_trees, attempted_layers)
def _assert_checkpoint_and_return_model(self, model_dir, global_step,
finalized_trees, attempted_layers):
reader = checkpoint_utils.load_checkpoint(model_dir)
self.assertEqual(global_step, reader.get_tensor(ops.GraphKeys.GLOBAL_STEP))
serialized = reader.get_tensor('boosted_trees:0_serialized')
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertEqual(
finalized_trees,
sum([1 for t in ensemble_proto.tree_metadata if t.is_finalized]))
self.assertEqual(attempted_layers,
ensemble_proto.growing_metadata.num_layers_attempted)
return ensemble_proto
def testFirstCheckpointWorksFine(self):
"""Tests that eval/pred doesn't crash with the very first checkpoint.
The step-0 checkpoint will have only an empty ensemble, and a separate eval
job might read from it and crash.
This test ensures that prediction/evaluation works fine with it.
"""
input_fn = _make_train_input_fn(is_classification=True)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
class BailOutWithoutTraining(session_run_hook.SessionRunHook):
def before_run(self, run_context):
raise StopIteration('to bail out.')
est.train(input_fn, steps=100, # must stop at 0 anyway.
hooks=[BailOutWithoutTraining()])
self._assert_checkpoint(
est.model_dir, global_step=0, finalized_trees=0, attempted_layers=0)
# Empty ensemble returns 0 logits, so that all output labels are 0.
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['accuracy'], 0.6)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose([[0], [0], [0], [0], [0]],
[pred['class_ids'] for pred in predictions])
def testTrainAndEvaluateBinaryClassifier(self):
input_fn = _make_train_input_fn(is_classification=True)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
# It will stop after 5 steps because of the max depth and num trees.
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(input_fn, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['accuracy'], 1.0)
def testTrainTwiceAndEvaluateBinaryClassifier(self):
  """Resuming training from a checkpoint keeps growing the ensemble."""
  classifier = boosted_trees.BoostedTreesClassifier(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=5,
      max_depth=10)
  train_input_fn = _make_train_input_fn(is_classification=True)
  # Two train() calls of 2 steps each resume from the checkpoint, so 4
  # layers are attempted in total and no tree is deep enough to finalize.
  classifier.train(train_input_fn, steps=2)
  classifier.train(train_input_fn, steps=2)
  self._assert_checkpoint(
      classifier.model_dir, global_step=4, finalized_trees=0,
      attempted_layers=4)
  eval_metrics = classifier.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['accuracy'], 1.0)
def testInferBinaryClassifier(self):
  """Predicted class ids from a trained classifier match golden labels."""
  classifier = boosted_trees.BoostedTreesClassifier(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  train_input_fn = _make_train_input_fn(is_classification=True)
  # Growing halts after 5 steps (one tree of depth 5), despite steps=100.
  classifier.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      classifier.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(classifier.predict(input_fn=predict_input_fn))
  self.assertAllClose([[0], [1], [1], [0], [0]],
                      [p['class_ids'] for p in preds])
def testTrainClassifierWithRankOneLabel(self):
  """Tests that label with rank-1 tensor is also accepted by classifier."""
  rank_one_label_input_fn = lambda: (FEATURES_DICT, [0., 1., 1., 0., 0.])
  classifier = boosted_trees.BoostedTreesClassifier(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Growing halts after 5 steps (one tree of depth 5), despite steps=100.
  classifier.train(rank_one_label_input_fn, steps=100)
  self._assert_checkpoint(
      classifier.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = classifier.evaluate(
      input_fn=rank_one_label_input_fn, steps=1)
  self.assertAllClose(eval_metrics['accuracy'], 1.0)
def testTrainClassifierWithLabelVocabulary(self):
  """String labels are mapped to class ids via label_vocabulary."""
  apple, banana = 'apple', 'banana'

  def _input_fn_with_label_vocab():
    return FEATURES_DICT, [[apple], [banana], [banana], [apple], [apple]]

  classifier = boosted_trees.BoostedTreesClassifier(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5,
      label_vocabulary=[apple, banana])
  classifier.train(input_fn=_input_fn_with_label_vocab, steps=5)
  self._assert_checkpoint(
      classifier.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = classifier.evaluate(
      input_fn=_input_fn_with_label_vocab, steps=1)
  self.assertAllClose(eval_metrics['accuracy'], 1.0)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(classifier.predict(input_fn=predict_input_fn))
  self.assertAllClose([[0], [1], [1], [0], [0]],
                      [p['class_ids'] for p in preds])
def testTrainClassifierWithIntegerLabel(self):
  """Integer labels are accepted by the classifier as well."""

  def _input_fn_with_integer_label():
    return (FEATURES_DICT,
            constant_op.constant([[0], [1], [1], [0], [0]], dtypes.int32))

  classifier = boosted_trees.BoostedTreesClassifier(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  classifier.train(input_fn=_input_fn_with_integer_label, steps=5)
  self._assert_checkpoint(
      classifier.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = classifier.evaluate(
      input_fn=_input_fn_with_integer_label, steps=1)
  self.assertAllClose(eval_metrics['accuracy'], 1.0)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(classifier.predict(input_fn=predict_input_fn))
  self.assertAllClose([[0], [1], [1], [0], [0]],
                      [p['class_ids'] for p in preds])
def testTrainClassifierWithDataset(self):
  """Classifier training also works with a Dataset-based input_fn."""
  train_input_fn = _make_train_input_fn_dataset(is_classification=True)
  classifier = boosted_trees.BoostedTreesClassifier(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Requested 100 steps, but growing stops after 5 (one depth-5 tree).
  classifier.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      classifier.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = classifier.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['accuracy'], 1.0)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(classifier.predict(input_fn=predict_input_fn))
  self.assertAllClose([[0], [1], [1], [0], [0]],
                      [p['class_ids'] for p in preds])
def testTrainAndEvaluateRegressor(self):
  """Trains a two-tree regressor and checks the final average loss."""
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=2,
      max_depth=5)
  train_input_fn = _make_train_input_fn(is_classification=False)
  # Two trees of depth 5 mean training stops after 10 steps, not 100.
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=10, finalized_trees=2,
      attempted_layers=10)
  eval_metrics = regressor.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['average_loss'], 1.008551)
def testInferRegressor(self):
  """Predictions from a trained regressor match golden values."""
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  train_input_fn = _make_train_input_fn(is_classification=False)
  # Growing halts after 5 steps (one tree of depth 5), despite steps=100.
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(regressor.predict(input_fn=predict_input_fn))
  self.assertAllClose(
      [[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
      [p['predictions'] for p in preds])
def testTrainRegressorWithRankOneLabel(self):
  """Tests that label with rank-1 tensor is also accepted by regressor."""
  rank_one_label_input_fn = lambda: (FEATURES_DICT, [1.5, 0.3, 0.2, 2., 5.])
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Growing halts after 5 steps (one tree of depth 5), despite steps=100.
  regressor.train(rank_one_label_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = regressor.evaluate(
      input_fn=rank_one_label_input_fn, steps=1)
  self.assertAllClose(eval_metrics['average_loss'], 2.478283)
def testTrainRegressorWithDataset(self):
  """Regressor training also works with a Dataset-based input_fn."""
  train_input_fn = _make_train_input_fn_dataset(is_classification=False)
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Requested 100 steps, but growing stops after 5 (one depth-5 tree).
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = regressor.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['average_loss'], 2.478283)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(regressor.predict(input_fn=predict_input_fn))
  self.assertAllClose(
      [[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
      [p['predictions'] for p in preds])
def testTrainRegressorWithDatasetBatch(self):
  """A batch spanning the entire data set matches the unbatched result."""
  # The batch_size as the entire data size should yield the same result as
  # dataset without batching.
  train_input_fn = _make_train_input_fn_dataset(
      is_classification=False, batch=5)
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Requested 100 steps, but growing stops after 5 (one depth-5 tree).
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = regressor.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['average_loss'], 2.478283)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(regressor.predict(input_fn=predict_input_fn))
  self.assertAllClose(
      [[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
      [p['predictions'] for p in preds])
def testTrainRegressorWithDatasetLargerBatch(self):
  """A batch that is a multiple of the data size still matches the result."""
  # The batch_size as the multiple of the entire data size should still yield
  # the same result.
  train_input_fn = _make_train_input_fn_dataset(
      is_classification=False, batch=15)
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Requested 100 steps, but growing stops after 5 (one depth-5 tree).
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=5, finalized_trees=1,
      attempted_layers=5)
  eval_metrics = regressor.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['average_loss'], 2.478283)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(regressor.predict(input_fn=predict_input_fn))
  self.assertAllClose(
      [[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
      [p['predictions'] for p in preds])
def testTrainRegressorWithDatasetSmallerBatch(self):
  """Small batches with matching n_batches_per_layer reproduce the result."""
  # Even when using small batches, if (n_batches_per_layer * batch_size) makes
  # the same entire data size, the result should be the same.
  train_input_fn = _make_train_input_fn_dataset(
      is_classification=False, batch=1)
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=5,
      n_trees=1,
      max_depth=5)
  # Train stops after (n_batches_per_layer * n_trees * max_depth) steps.
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=25, finalized_trees=1,
      attempted_layers=5)
  # 5 batches = one epoch.
  eval_metrics = regressor.evaluate(input_fn=train_input_fn, steps=5)
  self.assertAllClose(eval_metrics['average_loss'], 2.478283)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(regressor.predict(input_fn=predict_input_fn))
  self.assertAllClose(
      [[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
      [p['predictions'] for p in preds])
def testTrainRegressorWithDatasetWhenInputIsOverEarlier(self):
  """Training stops gracefully when the input stream is exhausted."""
  train_input_fn = _make_train_input_fn_dataset(
      is_classification=False, repeat=3)  # to stop input after 3 steps.
  regressor = boosted_trees.BoostedTreesRegressor(
      feature_columns=self._feature_columns,
      n_batches_per_layer=1,
      n_trees=1,
      max_depth=5)
  # Note that training will stop when input exhausts.
  # This might not be a typical pattern, but dataset.repeat(3) causes
  # the input stream to cease after 3 steps.
  regressor.train(train_input_fn, steps=100)
  self._assert_checkpoint(
      regressor.model_dir, global_step=3, finalized_trees=0,
      attempted_layers=3)
  eval_metrics = regressor.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(eval_metrics['average_loss'], 3.777295)
  predict_input_fn = numpy_io.numpy_input_fn(
      x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
  preds = list(regressor.predict(input_fn=predict_input_fn))
  self.assertAllClose(
      [[0.353850], [0.254100], [0.106850], [0.712100], [1.012100]],
      [p['predictions'] for p in preds])
def testTrainEvaluateAndPredictWithIndicatorColumn(self):
  """Trains a depth-1 tree on an indicator column plus a useless numeric.

  The categorical feature fully determines the labels, so a single split on
  the indicator column should fit the data perfectly, and the split must land
  on the indicator feature rather than the uninformative numeric one.
  """
  categorical = feature_column.categorical_column_with_vocabulary_list(
      key='categorical', vocabulary_list=('bad', 'good', 'ok'))
  feature_indicator = feature_column.indicator_column(categorical)
  bucketized_col = feature_column.bucketized_column(
      feature_column.numeric_column(
          'an_uninformative_feature', dtype=dtypes.float32),
      BUCKET_BOUNDARIES)
  labels = np.array([[0.], [5.7], [5.7], [0.], [0.]], dtype=np.float32)
  # Our categorical feature defines the labels perfectly
  input_fn = numpy_io.numpy_input_fn(
      x={
          'an_uninformative_feature': np.array([1, 1, 1, 1, 1]),
          'categorical': np.array(['bad', 'good', 'good', 'ok', 'bad']),
      },
      y=labels,
      batch_size=5,
      shuffle=False)
  # Train depth 1 tree. learning_rate=1.0 so the single leaf values can
  # match the labels exactly.
  est = boosted_trees.BoostedTreesRegressor(
      feature_columns=[bucketized_col, feature_indicator],
      n_batches_per_layer=1,
      n_trees=1,
      learning_rate=1.0,
      max_depth=1)
  num_steps = 1
  est.train(input_fn, steps=num_steps)
  ensemble = self._assert_checkpoint_and_return_model(
      est.model_dir, global_step=1, finalized_trees=1, attempted_layers=1)
  # We learnt perfectly.
  eval_res = est.evaluate(input_fn=input_fn, steps=1)
  self.assertAllClose(eval_res['loss'], 0)
  predictions = list(est.predict(input_fn))
  self.assertAllClose(
      labels,
      [pred['predictions'] for pred in predictions])
  # A depth-1 tree is one split node plus two leaves.
  self.assertEqual(3, len(ensemble.trees[0].nodes))
  # Check that the split happened on 'good' value, which will be encoded as
  # feature with index 2 (0-numeric, 1 - 'bad')
  self.assertEqual(2, ensemble.trees[0].nodes[0].bucketized_split.feature_id)
  self.assertEqual(0, ensemble.trees[0].nodes[0].bucketized_split.threshold)
def testTrainEvaluateAndPredictWithOnlyIndicatorColumn(self):
  """Same as the indicator-column test, but without the numeric feature.

  With only the indicator column present, the indicator dimensions are
  indexed starting at 0, so the 'good' split lands on feature index 1.
  """
  categorical = feature_column.categorical_column_with_vocabulary_list(
      key='categorical', vocabulary_list=('bad', 'good', 'ok'))
  feature_indicator = feature_column.indicator_column(categorical)
  labels = np.array([[0.], [5.7], [5.7], [0.], [0.]], dtype=np.float32)
  # Our categorical feature defines the labels perfectly
  input_fn = numpy_io.numpy_input_fn(
      x={
          'categorical': np.array(['bad', 'good', 'good', 'ok', 'bad']),
      },
      y=labels,
      batch_size=5,
      shuffle=False)
  # Train depth 1 tree.
  est = boosted_trees.BoostedTreesRegressor(
      feature_columns=[feature_indicator],
      n_batches_per_layer=1,
      n_trees=1,
      learning_rate=1.0,
      max_depth=1)
  num_steps = 1
  est.train(input_fn, steps=num_steps)
  ensemble = self._assert_checkpoint_and_return_model(
      est.model_dir, global_step=1, finalized_trees=1, attempted_layers=1)
  # We learnt perfectly.
  eval_res = est.evaluate(input_fn=input_fn, steps=1)
  self.assertAllClose(eval_res['loss'], 0)
  predictions = list(est.predict(input_fn))
  self.assertAllClose(
      labels,
      [pred['predictions'] for pred in predictions])
  # A depth-1 tree is one split node plus two leaves.
  self.assertEqual(3, len(ensemble.trees[0].nodes))
  # Check that the split happened on 'good' value, which will be encoded as
  # feature with index 1 (0 - 'bad', 2 - 'ok')
  self.assertEqual(1, ensemble.trees[0].nodes[0].bucketized_split.feature_id)
  self.assertEqual(0, ensemble.trees[0].nodes[0].bucketized_split.threshold)
def testTreeComplexityIsSetCorrectly(self):
  """Validates the tree_complexity / pruning_mode consistency checks."""
  train_input_fn = _make_train_input_fn(is_classification=True)

  def _make_classifier(**overrides):
    # Shared constructor args; each case tweaks pruning/complexity only.
    params = dict(
        feature_columns=self._feature_columns,
        n_batches_per_layer=1,
        n_trees=1,
        max_depth=5)
    params.update(overrides)
    return boosted_trees.BoostedTreesClassifier(**params)

  # Tree complexity is set but no pruning.
  with self.assertRaisesRegexp(ValueError, 'Tree complexity have no effect'):
    _make_classifier(tree_complexity=1e-3).train(train_input_fn, steps=10)
  # Pruning but no tree complexity.
  with self.assertRaisesRegexp(ValueError,
                               'tree_complexity must be positive'):
    _make_classifier(pruning_mode='pre').train(train_input_fn, steps=10)
  # All is good.
  _make_classifier(
      pruning_mode='pre', tree_complexity=1e-3).train(
          train_input_fn, steps=10)
class BoostedTreesDebugOutputsTest(test_util.TensorFlowTestCase):
  """Test debug/model explainability outputs for individual predictions.

  Includes directional feature contributions (DFC): per-example, per-feature
  contributions that, together with the bias, sum to the model output.
  """

  def setUp(self):
    # One bucketized column per raw numeric feature f_0 .. f_{NUM_FEATURES-1}.
    self._feature_columns = {
        feature_column.bucketized_column(
            feature_column.numeric_column('f_%d' % i, dtype=dtypes.float32),
            BUCKET_BOUNDARIES) for i in range(NUM_FEATURES)
    }

  def testBinaryClassifierThatDFCIsInPredictions(self):
    """DFCs + bias must reconstruct the classifier's probabilities."""
    train_input_fn = _make_train_input_fn(is_classification=True)
    predict_input_fn = numpy_io.numpy_input_fn(
        x=FEATURES_DICT, y=None, batch_size=3, num_epochs=1, shuffle=False)
    # center_bias=True so the reported 'bias' is the class prior.
    est = boosted_trees.BoostedTreesClassifier(
        feature_columns=self._feature_columns,
        n_batches_per_layer=1,
        n_trees=1,
        max_depth=5,
        center_bias=True)
    num_steps = 100
    # Train for a few steps. Validate debug outputs in prediction dicts.
    est.train(train_input_fn, steps=num_steps)
    debug_predictions = est.experimental_predict_with_explanations(
        predict_input_fn)
    biases, dfcs = zip(*[(pred['bias'], pred['dfc'])
                         for pred in debug_predictions])
    # Bias equals the label mean (2 positives out of 5 examples).
    self.assertAllClose([0.4] * 5, biases)
    # Golden per-feature contributions for each of the 5 examples.
    self.assertAllClose(({
        0: -0.12108613453574479,
        1: 0.0,
        2: -0.039254929814481143
    }, {
        0: 0.19650601422250574,
        1: 0.0,
        2: 0.02693827052766018
    }, {
        0: 0.16057487356133376,
        1: 0.0,
        2: 0.02693827052766018
    }, {
        0: -0.12108613453574479,
        1: 0.0,
        2: -0.039254929814481143
    }, {
        0: -0.10832468554550384,
        1: 0.0,
        2: 0.02693827052766018
    }), dfcs)
    # Assert sum(dfcs) + bias == probabilities.
    expected_probabilities = [
        0.23965894, 0.62344426, 0.58751315, 0.23965894, 0.31861359
    ]
    probabilities = [
        sum(dfc.values()) + bias for (dfc, bias) in zip(dfcs, biases)
    ]
    self.assertAllClose(expected_probabilities, probabilities)
    # When user doesn't include bias or dfc in predict_keys, make sure to still
    # include dfc and bias.
    debug_predictions = est.experimental_predict_with_explanations(
        predict_input_fn, predict_keys=['probabilities'])
    for prediction_dict in debug_predictions:
      self.assertTrue('bias' in prediction_dict)
      self.assertTrue('dfc' in prediction_dict)
      self.assertTrue('probabilities' in prediction_dict)
      self.assertEqual(len(prediction_dict), 3)

  def testRegressorThatDFCIsInPredictions(self):
    """DFCs + bias must reconstruct the regressor's predictions."""
    train_input_fn = _make_train_input_fn(is_classification=False)
    predict_input_fn = numpy_io.numpy_input_fn(
        x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
    # center_bias=True so the reported 'bias' is the label mean.
    est = boosted_trees.BoostedTreesRegressor(
        feature_columns=self._feature_columns,
        n_batches_per_layer=1,
        n_trees=1,
        max_depth=5,
        center_bias=True)
    num_steps = 100
    # Train for a few steps. Validate debug outputs in prediction dicts.
    est.train(train_input_fn, steps=num_steps)
    debug_predictions = est.experimental_predict_with_explanations(
        predict_input_fn)
    biases, dfcs = zip(*[(pred['bias'], pred['dfc'])
                         for pred in debug_predictions])
    self.assertAllClose([1.8] * 5, biases)
    # Golden per-feature contributions for each of the 5 examples.
    self.assertAllClose(({
        0: -0.070499420166015625,
        1: -0.095000028610229492,
        2: 0.0
    }, {
        0: -0.53763031959533691,
        1: 0.063333392143249512,
        2: 0.0
    }, {
        0: -0.51756942272186279,
        1: -0.095000028610229492,
        2: 0.0
    }, {
        0: 0.1563495397567749,
        1: 0.063333392143249512,
        2: 0.0
    }, {
        0: 0.96934974193572998,
        1: 0.063333392143249512,
        2: 0.0
    }), dfcs)
    # Assert sum(dfcs) + bias == predictions.
    expected_predictions = [[1.6345005], [1.32570302], [1.1874305],
                            [2.01968288], [2.83268309]]
    predictions = [
        [sum(dfc.values()) + bias] for (dfc, bias) in zip(dfcs, biases)
    ]
    self.assertAllClose(expected_predictions, predictions)
    # Test when user doesn't include bias or dfc in predict_keys.
    debug_predictions = est.experimental_predict_with_explanations(
        predict_input_fn, predict_keys=['predictions'])
    for prediction_dict in debug_predictions:
      self.assertTrue('bias' in prediction_dict)
      self.assertTrue('dfc' in prediction_dict)
      self.assertTrue('predictions' in prediction_dict)
      self.assertEqual(len(prediction_dict), 3)
class ModelFnTests(test_util.TensorFlowTestCase):
"""Tests bt_model_fn including unexposed internal functionalities."""
def setUp(self):
  """Creates one bucketized feature column per raw numeric feature."""
  bucketized_columns = []
  for i in range(NUM_FEATURES):
    numeric = feature_column.numeric_column('f_%d' % i, dtype=dtypes.float32)
    bucketized_columns.append(
        feature_column.bucketized_column(numeric, BUCKET_BOUNDARIES))
  self._feature_columns = set(bucketized_columns)
def _get_expected_ensembles_for_classification(self):
  """Returns golden TreeEnsemble text protos after each training round.

  Returns:
    A 3-tuple of text-format TreeEnsemble protos: the expected state after
    rounds 1, 2 and 3 of classification training (no bias centering).
  """
  # Round 1: a single depth-1 tree, split on feature 2.
  first_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.387675
}
}
nodes {
leaf {
scalar: -0.181818
}
}
nodes {
leaf {
scalar: 0.0625
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  # Round 2: the first tree grows to depth 2 and is finalized; an empty
  # second tree is started.
  second_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.387675
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 3
left_id: 3
right_id: 4
}
metadata {
gain: 0.0
original_leaf {
scalar: -0.181818
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.105518
original_leaf {
scalar: 0.0625
}
}
}
nodes {
leaf {
scalar: -0.348397
}
}
nodes {
leaf {
scalar: -0.181818
}
}
nodes {
leaf {
scalar: 0.224091
}
}
nodes {
leaf {
scalar: 0.056815
}
}
}
trees {
nodes {
leaf {
scalar: 0.0
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 0
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_start: 0
last_layer_node_end: 1
}
"""
  # Round 3: the second tree grows its first layer, split on feature 1.
  third_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.387675
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 3
left_id: 3
right_id: 4
}
metadata {
gain: 0.0
original_leaf {
scalar: -0.181818
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.105518
original_leaf {
scalar: 0.0625
}
}
}
nodes {
leaf {
scalar: -0.348397
}
}
nodes {
leaf {
scalar: -0.181818
}
}
nodes {
leaf {
scalar: 0.224091
}
}
nodes {
leaf {
scalar: 0.056815
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 0
left_id: 1
right_id: 2
}
metadata {
gain: 0.287131
}
}
nodes {
leaf {
scalar: 0.162042
}
}
nodes {
leaf {
scalar: -0.086986
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  return (first_round, second_round, third_round)
def _get_expected_ensembles_for_classification_with_bias(self):
  """Returns golden TreeEnsemble text protos with bias centering enabled.

  Returns:
    A 4-tuple of text-format TreeEnsemble protos: the expected state after
    bias centering (round 1) and after each of the 3 subsequent growing
    rounds of classification training.
  """
  # Round 1: bias centering only — a single leaf holding the centered bias.
  first_round = """
trees {
nodes {
leaf {
scalar: -0.405086
}
}
}
tree_weights: 1.0
tree_metadata {
}
"""
  # Round 2: first real split; the centered bias becomes the original_leaf.
  second_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.407711
original_leaf {
scalar: -0.405086
}
}
}
nodes {
leaf {
scalar: -0.556054
}
}
nodes {
leaf {
scalar: -0.301233
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  # Round 3: first tree grows to depth 2 and finalizes; empty second tree.
  third_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.407711
original_leaf {
scalar: -0.405086
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 3
left_id: 3
right_id: 4
}
metadata {
original_leaf {
scalar: -0.556054
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.09876
original_leaf {
scalar: -0.301233
}
}
}
nodes {
leaf {
scalar: -0.698072
}
}
nodes {
leaf {
scalar: -0.556054
}
}
nodes {
leaf {
scalar: -0.106016
}
}
nodes {
leaf {
scalar: -0.27349
}
}
}
trees {
nodes {
leaf {
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_end: 1
}
"""
  # Round 4: the second tree grows its first layer.
  forth_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.4077113
original_leaf {
scalar: -0.405086
}
}
}
nodes {
bucketized_split {
threshold: 3
left_id: 3
right_id: 4
}
metadata {
original_leaf {
scalar: -0.556054
}
}
}
nodes {
bucketized_split {
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.09876
original_leaf {
scalar: -0.301233
}
}
}
nodes {
leaf {
scalar: -0.698072
}
}
nodes {
leaf {
scalar: -0.556054
}
}
nodes {
leaf {
scalar: -0.106016
}
}
nodes {
leaf {
scalar: -0.27349
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.289927
}
}
nodes {
leaf {
scalar: -0.134588
}
}
nodes {
leaf {
scalar: 0.083838
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  return (first_round, second_round, third_round, forth_round)
def _get_expected_ensembles_for_regression(self):
  """Returns golden TreeEnsemble text protos after each training round.

  Returns:
    A 3-tuple of text-format TreeEnsemble protos: the expected state after
    rounds 1, 2 and 3 of regression training (no bias centering).
  """
  # Round 1: a single depth-1 tree, split on feature 1.
  first_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.169714
}
}
nodes {
leaf {
scalar: 0.241322
}
}
nodes {
leaf {
scalar: 0.083951
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  # Round 2: the first tree grows to depth 2 and is finalized; an empty
  # second tree is started.
  second_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.169714
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.673407
original_leaf {
scalar: 0.241322
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.324102
original_leaf {
scalar: 0.083951
}
}
}
nodes {
leaf {
scalar: 0.563167
}
}
nodes {
leaf {
scalar: 0.247047
}
}
nodes {
leaf {
scalar: 0.095273
}
}
nodes {
leaf {
scalar: 0.222102
}
}
}
trees {
nodes {
leaf {
scalar: 0.0
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 0
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_start: 0
last_layer_node_end: 1
}
"""
  # Round 3: the second tree grows its first layer, split on feature 1.
  third_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.169714
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.673407
original_leaf {
scalar: 0.241322
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.324102
original_leaf {
scalar: 0.083951
}
}
}
nodes {
leaf {
scalar: 0.563167
}
}
nodes {
leaf {
scalar: 0.247047
}
}
nodes {
leaf {
scalar: 0.095273
}
}
nodes {
leaf {
scalar: 0.222102
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 0
left_id: 1
right_id: 2
}
metadata {
gain: 0.981026
}
}
nodes {
leaf {
scalar: 0.005166
}
}
nodes {
leaf {
scalar: 0.180281
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  return (first_round, second_round, third_round)
def _get_expected_ensembles_for_regression_with_bias(self):
  """Returns golden TreeEnsemble text protos with bias centering enabled.

  Returns:
    A 4-tuple of text-format TreeEnsemble protos: the expected state after
    bias centering (round 1) and after each of the 3 subsequent growing
    rounds of regression training.
  """
  # Round 1: bias centering only — a single leaf holding the label mean.
  first_round = """
trees {
nodes {
leaf {
scalar: 1.799974
}
}
}
tree_weights: 1.0
tree_metadata {
}
"""
  # Round 2: first real split; the centered bias becomes the original_leaf.
  second_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.190442
original_leaf {
scalar: 1.799974
}
}
}
nodes {
leaf {
scalar: 1.862786
}
}
nodes {
leaf {
scalar: 1.706149
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  # Round 3: first tree grows to depth 2 and finalizes; empty second tree.
  third_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.190442
original_leaf {
scalar: 1.799974
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.683594
original_leaf {
scalar: 1.862786
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.322693
original_leaf {
scalar: 1.706149
}
}
}
nodes {
leaf {
scalar: 2.024487
}
}
nodes {
leaf {
scalar: 1.710319
}
}
nodes {
leaf {
scalar: 1.559208
}
}
nodes {
leaf {
scalar: 1.686037
}
}
}
trees {
nodes {
leaf {
scalar: 0.0
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 0
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_start: 0
last_layer_node_end: 1
}
"""
  # Round 4: the second tree grows its first layer.
  forth_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.190442
original_leaf {
scalar: 1.799974
}
}
}
nodes {
bucketized_split {
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.683594
original_leaf {
scalar: 1.8627863
}
}
}
nodes {
bucketized_split {
left_id: 5
right_id: 6
}
metadata {
gain: 0.322693
original_leaf {
scalar: 1.706149
}
}
}
nodes {
leaf {
scalar: 2.024487
}
}
nodes {
leaf {
scalar: 1.710319
}
}
nodes {
leaf {
scalar: 1.5592078
}
}
nodes {
leaf {
scalar: 1.686037
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 1
left_id: 1
right_id: 2
}
metadata {
gain: 0.972589
}
}
nodes {
leaf {
scalar: -0.137592
}
}
nodes {
leaf {
scalar: 0.034926
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
  return (first_round, second_round, third_round, forth_round)
def _get_train_op_and_ensemble(self,
                               head,
                               config,
                               is_classification,
                               train_in_memory,
                               center_bias=False):
  """Calls bt_model_fn() and returns the train_op and ensemble_serialized.

  Builds the model function graph in the current default graph and runs the
  resource/variable initializers, so callers must invoke this inside an
  active session and graph context.

  Args:
    head: The model head (classification or regression).
    config: A RunConfig passed through to the model function.
    is_classification: Whether to build classification training data.
    train_in_memory: Whether to use the in-memory training path.
    center_bias: Whether bias centering is enabled.

  Returns:
    A (train_op, ensemble_serialized) pair; evaluating the pair runs one
    training step and yields the serialized ensemble after that step.
  """
  features, labels = _make_train_input_fn(is_classification)()
  tree_hparams = boosted_trees._TreeHParams(  # pylint:disable=protected-access
      n_trees=2,
      max_depth=2,
      learning_rate=0.1,
      l1=0.,
      l2=0.01,
      tree_complexity=0.,
      min_node_weight=0.,
      center_bias=center_bias,
      pruning_mode='none')
  estimator_spec = boosted_trees._bt_model_fn(  # pylint:disable=protected-access
      features=features,
      labels=labels,
      mode=model_fn.ModeKeys.TRAIN,
      head=head,
      feature_columns=self._feature_columns,
      tree_hparams=tree_hparams,
      example_id_column_name=EXAMPLE_ID_COLUMN,
      n_batches_per_layer=1,
      config=config,
      train_in_memory=train_in_memory)
  # Initialize the tree-ensemble resource and all variables (requires a
  # default session).
  resources.initialize_resources(resources.shared_resources()).run()
  variables.global_variables_initializer().run()
  variables.local_variables_initializer().run()
  # Gets the train_op and serialized proto of the ensemble.
  shared_resources = resources.shared_resources()
  self.assertEqual(1, len(shared_resources))
  train_op = estimator_spec.train_op
  # Serialize only after the train op runs, so each sess.run of the pair
  # observes the ensemble state after one more training step.
  with ops.control_dependencies([train_op]):
    _, ensemble_serialized = (
        gen_boosted_trees_ops.boosted_trees_serialize_ensemble(
            shared_resources[0].handle))
  return train_op, ensemble_serialized
def testTrainClassifierInMemory(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third = (
self._get_expected_ensembles_for_classification())
with self.cached_session() as sess:
# Train with train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_classification_head(n_classes=2),
run_config.RunConfig(),
is_classification=True,
train_in_memory=True)
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
def testTrainClassifierWithCenterBiasInMemory(self):
  """Trains an in-memory classifier with bias centering enabled.

  When bias centering is on, the first golden ensemble is the one observed
  once the bias has converged (after several centering iterations); three
  further boosting rounds follow, each validated against its golden proto.
  """
  ops.reset_default_graph()
  (expected_first, expected_second, expected_third, expected_fourth) = (
      self._get_expected_ensembles_for_classification_with_bias())
  with self.cached_session() as sess:
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_classification_head(n_classes=2),
          run_config.RunConfig(),
          is_classification=True,
          train_in_memory=True,
          center_bias=True)

    def _validate(serialized, expected):
      # Parse the serialized ensemble and compare against the golden proto.
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)

    # 4 iterations to center bias; validate the resulting ensemble once.
    for _ in range(4):
      _, serialized = sess.run([train_op, ensemble_serialized])
    _validate(serialized, expected_first)
    # Three more boosting rounds, validating after each.
    for expected in (expected_second, expected_third, expected_fourth):
      _, serialized = sess.run([train_op, ensemble_serialized])
      _validate(serialized, expected)
def testTrainClassifierNonInMemory(self):
  """Trains a classifier without in-memory mode; validates each round.

  Same golden ensembles as the in-memory variant — only the training-path
  flag differs — so the two tests must stay consistent.
  """
  ops.reset_default_graph()
  # One golden ensemble proto per boosting round, in order.
  expected_ensembles = self._get_expected_ensembles_for_classification()
  with self.cached_session() as sess:
    # Train without train_in_memory mode.
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_classification_head(n_classes=2),
          run_config.RunConfig(),
          is_classification=True,
          train_in_memory=False)
    # Run one round at a time and validate the trained ensemble after each.
    for expected in expected_ensembles:
      _, serialized = sess.run([train_op, ensemble_serialized])
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)
def testTrainClassifierWithCenterBiasNonInMemory(self):
  """Trains a non-in-memory classifier with bias centering enabled.

  When bias centering is on, the first golden ensemble is the one observed
  once the bias has converged (after several centering iterations); three
  further boosting rounds follow, each validated against its golden proto.
  """
  ops.reset_default_graph()
  (expected_first, expected_second, expected_third, expected_fourth) = (
      self._get_expected_ensembles_for_classification_with_bias())
  with self.cached_session() as sess:
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_classification_head(n_classes=2),
          run_config.RunConfig(),
          is_classification=True,
          train_in_memory=False,
          center_bias=True)

    def _validate(serialized, expected):
      # Parse the serialized ensemble and compare against the golden proto.
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)

    # 4 iterations to center bias; validate the resulting ensemble once.
    for _ in range(4):
      _, serialized = sess.run([train_op, ensemble_serialized])
    _validate(serialized, expected_first)
    # Three more boosting rounds, validating after each.
    for expected in (expected_second, expected_third, expected_fourth):
      _, serialized = sess.run([train_op, ensemble_serialized])
      _validate(serialized, expected)
def testTrainRegressorInMemory(self):
  """Trains an in-memory regressor for three rounds, validating each round.

  Each `sess.run` performs one boosting round; the serialized ensemble is
  fetched in the same run and must match the golden proto for that round.
  """
  ops.reset_default_graph()
  # One golden ensemble proto per boosting round, in order.
  expected_ensembles = self._get_expected_ensembles_for_regression()
  with self.cached_session() as sess:
    # Train with train_in_memory mode.
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_regression_head(label_dimension=1),
          run_config.RunConfig(),
          is_classification=False,
          train_in_memory=True)
    # Run one round at a time and validate the trained ensemble after each.
    for expected in expected_ensembles:
      _, serialized = sess.run([train_op, ensemble_serialized])
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)
def testTrainRegressorInMemoryWithCenterBias(self):
  """Trains an in-memory regressor with bias centering enabled.

  The first golden ensemble is validated after the bias-centering
  iterations complete; three further boosting rounds follow, each
  validated against its golden proto.
  """
  ops.reset_default_graph()
  (expected_first, expected_second, expected_third, expected_fourth) = (
      self._get_expected_ensembles_for_regression_with_bias())
  with self.cached_session() as sess:
    # Train with train_in_memory mode.
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_regression_head(label_dimension=1),
          run_config.RunConfig(),
          is_classification=False,
          train_in_memory=True,
          center_bias=True)

    def _validate(serialized, expected):
      # Parse the serialized ensemble and compare against the golden proto.
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)

    # 3 iterations to center bias; validate the resulting ensemble once.
    for _ in range(3):
      _, serialized = sess.run([train_op, ensemble_serialized])
    _validate(serialized, expected_first)
    # Three more boosting rounds, validating after each.
    for expected in (expected_second, expected_third, expected_fourth):
      _, serialized = sess.run([train_op, ensemble_serialized])
      _validate(serialized, expected)
def testTrainRegressorNonInMemory(self):
  """Trains a regressor without in-memory mode; validates each round.

  Same golden ensembles as the in-memory variant — only the training-path
  flag differs — so the two tests must stay consistent.
  """
  ops.reset_default_graph()
  # One golden ensemble proto per boosting round, in order.
  expected_ensembles = self._get_expected_ensembles_for_regression()
  with self.cached_session() as sess:
    # Train without train_in_memory mode.
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_regression_head(label_dimension=1),
          run_config.RunConfig(),
          is_classification=False,
          train_in_memory=False)
    # Run one round at a time and validate the trained ensemble after each.
    for expected in expected_ensembles:
      _, serialized = sess.run([train_op, ensemble_serialized])
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)
def testTrainRegressorNotInMemoryWithCenterBias(self):
  """Trains a non-in-memory regressor with bias centering enabled.

  The first golden ensemble is validated after the bias-centering
  iterations complete; three further boosting rounds follow, each
  validated against its golden proto.
  """
  ops.reset_default_graph()
  (expected_first, expected_second, expected_third, expected_fourth) = (
      self._get_expected_ensembles_for_regression_with_bias())
  with self.cached_session() as sess:
    with sess.graph.as_default():
      train_op, ensemble_serialized = self._get_train_op_and_ensemble(
          boosted_trees._create_regression_head(label_dimension=1),
          run_config.RunConfig(),
          is_classification=False,
          train_in_memory=False,
          center_bias=True)

    def _validate(serialized, expected):
      # Parse the serialized ensemble and compare against the golden proto.
      ensemble_proto = boosted_trees_pb2.TreeEnsemble()
      ensemble_proto.ParseFromString(serialized)
      self.assertProtoEquals(expected, ensemble_proto)

    # 3 iterations to center the bias (because we are using regularization);
    # validate the resulting ensemble once.
    for _ in range(3):
      _, serialized = sess.run([train_op, ensemble_serialized])
    _validate(serialized, expected_first)
    # Three more boosting rounds, validating after each.
    for expected in (expected_second, expected_third, expected_fourth):
      _, serialized = sess.run([train_op, ensemble_serialized])
      _validate(serialized, expected)
# Script entry point: run all test cases in this module under TF's test runner.
if __name__ == '__main__':
  googletest.main()
| 32.211414
| 83
| 0.593243
| 7,125
| 64,906
| 5.109333
| 0.077754
| 0.030574
| 0.019448
| 0.01846
| 0.823316
| 0.800242
| 0.7856
| 0.77475
| 0.763075
| 0.748627
| 0
| 0.051152
| 0.324115
| 64,906
| 2,014
| 84
| 32.227408
| 0.778682
| 0.090962
| 0
| 0.701544
| 0
| 0
| 0.344254
| 0.009657
| 0
| 0
| 0
| 0
| 0.059463
| 1
| 0.027444
| false
| 0
| 0.012007
| 0.002287
| 0.049743
| 0.000572
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be12f8457527f8ac63152c710f55bd72fcbe84a0
| 25,579
|
py
|
Python
|
src/backend/kpdata/migrations/0001_initial.py
|
qiyangduan/kandbox_planner
|
4785a76443bd4b8f25aeb36e03945daedb165c43
|
[
"Apache-2.0"
] | 1
|
2020-05-03T21:26:43.000Z
|
2020-05-03T21:26:43.000Z
|
src/backend/kpdata/migrations/0001_initial.py
|
qiyangduan/kandbox_planner
|
4785a76443bd4b8f25aeb36e03945daedb165c43
|
[
"Apache-2.0"
] | 11
|
2020-11-13T18:48:37.000Z
|
2022-03-12T00:26:37.000Z
|
src/backend/kpdata/migrations/0001_initial.py
|
qiyangduan/kandbox_planner
|
4785a76443bd4b8f25aeb36e03945daedb165c43
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-05-05 19:24
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the ``kpdata`` app (auto-generated by Django).

    NOTE(review): this migration was generated by ``makemigrations``; it is
    part of the recorded migration graph and should not be hand-edited beyond
    comments. Several DateTime/Date fields default to ``datetime.datetime.now``
    (naive, evaluated at save time) — presumably intentional, but confirm
    against the project's ``USE_TZ`` setting.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Core planning entities: Game (a planning batch/run) and Job (a visit).
        migrations.CreateModel(
            name='Game',
            fields=[
                ('game_code', models.CharField(max_length=450, primary_key=True, serialize=False)),
                ('game_based_on', models.CharField(blank=True, max_length=1000, null=True)),
                ('game_start_date', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('game_finish_date', models.DateTimeField(blank=True, null=True)),
                ('planner_code', models.CharField(blank=True, max_length=250, null=True)),
                ('game_config', models.CharField(blank=True, max_length=4000, null=True)),
                ('data_start_day', models.CharField(blank=True, max_length=250, null=True)),
                ('data_end_day', models.CharField(blank=True, max_length=250, null=True)),
                ('game_status', models.CharField(blank=True, max_length=1000, null=True)),
                ('error_message', models.CharField(blank=True, max_length=4000, null=True)),
                ('game_name', models.CharField(blank=True, max_length=1000, null=True)),
                ('game_description', models.CharField(blank=True, max_length=1000, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Job',
            fields=[
                ('job_code', models.CharField(max_length=250, primary_key=True, serialize=False)),
                ('job_type', models.CharField(blank=True, max_length=250, null=True)),
                ('planning_status', models.CharField(blank=True, max_length=10, null=True, verbose_name='Status')),
                ('active', models.IntegerField(blank=True, default=1, null=True)),
                ('scheduled_related_worker_code', models.CharField(blank=True, max_length=1000, null=True)),
                ('scheduled_start_datetime', models.DateTimeField(blank=True, null=True, verbose_name='Planned Start')),
                ('scheduled_duration_minutes', models.FloatField(blank=True, null=True, verbose_name='Duration')),
                ('scheduled_share_status', models.CharField(blank=True, max_length=10, null=True)),
                ('requested_start_datetime', models.DateTimeField(verbose_name='Requested Start')),
                ('requested_duration_minutes', models.FloatField()),
                ('location_code', models.CharField(blank=True, max_length=250, null=True)),
                ('geo_longitude', models.FloatField()),
                ('geo_latitude', models.FloatField()),
                ('requested_min_level', models.IntegerField(blank=True, null=True)),
                ('requested_skills', models.CharField(blank=True, max_length=4000, null=True)),
                ('mandatory_day_flag', models.IntegerField(blank=True, null=True)),
                ('preferred_day_flag', models.IntegerField(blank=True, null=True)),
                ('conflict_level', models.IntegerField(blank=True, default=0, null=True)),
                ('mandatory_day_minmax_flag', models.IntegerField(blank=True, null=True)),
                ('preferred_day_minmax_flag', models.IntegerField(blank=True, null=True)),
                ('requested_start_min_day', models.IntegerField(blank=True, null=True)),
                ('requested_start_max_day', models.IntegerField(blank=True, null=True)),
                ('mandatory_minutes_minmax_flag', models.IntegerField(blank=True, null=True)),
                ('preferred_minutes_minmax_flag', models.IntegerField(blank=True, null=True)),
                ('requested_start_min_minutes', models.IntegerField(blank=True, null=True)),
                ('requested_start_max_minutes', models.IntegerField(blank=True, null=True)),
                ('requested_week_days_flag', models.IntegerField(blank=True, null=True)),
                ('requested_week_days', models.CharField(blank=True, max_length=100, null=True)),
                ('actual_start_datetime', models.DateTimeField(blank=True, null=True)),
                ('actual_duration_minutes', models.FloatField(blank=True, null=True)),
                ('scheduled_travel_minutes_before', models.FloatField(blank=True, null=True)),
                ('scheduled_travel_minutes_after', models.FloatField(blank=True, null=True)),
                ('scheduled_travel_prev_code', models.CharField(blank=True, max_length=250, null=True)),
                ('scheduled_travel_next_code', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_by', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_game_code', models.CharField(blank=True, max_length=250, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
            ],
            options={
                'verbose_name': 'Current_Visit',
                'verbose_name_plural': 'Visits',
            },
        ),
        # History/snapshot tables mirroring Job per planner/game batch.
        migrations.CreateModel(
            name='JobChangeHistory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('job_code', models.CharField(max_length=250, verbose_name='Visit')),
                ('job_type', models.CharField(blank=True, max_length=250, null=True, verbose_name='Type Info')),
                ('planning_status', models.CharField(blank=True, max_length=10, null=True, verbose_name='Status')),
                ('planner_code', models.CharField(default='orig', max_length=250, verbose_name='Planner')),
                ('game_code', models.CharField(max_length=450, verbose_name='Batch')),
                ('planning_note', models.CharField(blank=True, max_length=1000, null=True)),
                ('geo_longitude', models.FloatField(null=True)),
                ('geo_latitude', models.FloatField(null=True)),
                ('scheduled_worker_code', models.CharField(blank=True, max_length=250, null=True, verbose_name='Planned Tech')),
                ('scheduled_related_worker_code', models.CharField(blank=True, max_length=1000, null=True, verbose_name='Secondary Tech')),
                ('scheduled_start_datetime', models.DateTimeField(blank=True, null=True, verbose_name='Planned Start')),
                ('scheduled_duration_minutes', models.FloatField(null=True, verbose_name='Duration')),
                ('scheduled_share_status', models.CharField(blank=True, max_length=10, null=True)),
                ('scheduled_travel_minutes_before', models.FloatField(blank=True, null=True)),
                ('scheduled_travel_prev_code', models.CharField(blank=True, max_length=250, null=True)),
                ('operation', models.CharField(blank=True, default='U', max_length=10, null=True)),
                ('effective_from_date', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
            ],
            options={
                'verbose_name': 'Visit Change',
                'verbose_name_plural': 'Changed Visits',
                'db_table': 'kpdata_job_change_history',
            },
        ),
        migrations.CreateModel(
            name='JobStatus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('planner_code', models.CharField(default='orig', max_length=250, verbose_name='Planner')),
                ('game_code', models.CharField(max_length=450, verbose_name='Batch')),
                ('job_code', models.CharField(max_length=250, verbose_name='Visit')),
                ('job_type', models.CharField(blank=True, max_length=250, null=True, verbose_name='Type Info')),
                ('requested_skills', models.CharField(blank=True, max_length=4000, null=True)),
                ('planning_status', models.CharField(blank=True, max_length=10, null=True, verbose_name='Status')),
                ('scheduled_worker_code', models.CharField(blank=True, max_length=250, null=True, verbose_name='Scheduled Worker')),
                ('scheduled_related_worker_code', models.CharField(blank=True, max_length=1000, null=True, verbose_name='Secondary Tech')),
                ('scheduled_start_datetime', models.DateTimeField(blank=True, null=True, verbose_name='Planned Start')),
                ('scheduled_duration_minutes', models.FloatField(null=True, verbose_name='Duration')),
                ('scheduled_travel_minutes_before', models.FloatField(blank=True, null=True)),
                ('scheduled_travel_minutes_after', models.FloatField(blank=True, null=True)),
                ('scheduled_travel_prev_code', models.CharField(blank=True, max_length=250, null=True)),
                ('scheduled_travel_next_code', models.CharField(blank=True, max_length=250, null=True)),
                ('conflict_level', models.IntegerField(blank=True, default=1, null=True)),
                ('scheduled_share_status', models.CharField(blank=True, max_length=10, null=True)),
                ('requested_start_datetime', models.DateTimeField(blank=True, null=True, verbose_name='Requested Start')),
                ('requested_duration_minutes', models.FloatField(blank=True, null=True)),
                ('requested_worker_code', models.CharField(blank=True, max_length=250, null=True, verbose_name='Requested Worker')),
                ('location_code', models.CharField(blank=True, max_length=250, null=True)),
                ('geo_longitude', models.FloatField(blank=True, null=True)),
                ('geo_latitude', models.FloatField(blank=True, null=True)),
                ('changed_flag', models.IntegerField(blank=True, default=0, null=True)),
                ('error_message', models.CharField(blank=True, max_length=3950, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
                ('effective_to_date', models.DateField(blank=True, null=True)),
            ],
            options={
                'verbose_name': 'Batch_Visit',
                'verbose_name_plural': 'Visit Batches',
            },
        ),
        # Derived feature tables used by the planner.
        migrations.CreateModel(
            name='LocationJobHistoryFeatures',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_code', models.CharField(max_length=250)),
                ('job_historical_worker_service_dict', models.CharField(blank=True, max_length=60000, null=True)),
                ('job_count', models.IntegerField(blank=True, null=True)),
                ('list_requested_worker_code', models.CharField(blank=True, max_length=4000, null=True)),
                ('avg_actual_start_minutes', models.FloatField(null=True)),
                ('avg_actual_duration_minutes', models.FloatField(null=True)),
                ('avg_days_delay', models.FloatField(null=True)),
                ('stddev_days_delay', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'kpdata_location_job_history_features',
            },
        ),
        migrations.CreateModel(
            name='LocationWorkerAffinityFeatures',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_code', models.CharField(max_length=250)),
                ('worker_code', models.CharField(max_length=250)),
                ('job_count', models.IntegerField(blank=True, null=True)),
                ('total_duration_minutes', models.IntegerField(blank=True, null=True)),
                ('location_2_home_distance', models.FloatField(blank=True, null=True)),
                ('location_2_working_gmm_distance', models.FloatField(blank=True, null=True)),
                ('affinity_score', models.FloatField(blank=True, null=True)),
            ],
            options={
                'db_table': 'kpdata_location_worker_affinity_features',
            },
        ),
        # Planner configuration and per-day / per-worker statistics.
        migrations.CreateModel(
            name='PlannerParameter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('planner_code', models.CharField(max_length=250)),
                ('param_name', models.CharField(max_length=250)),
                ('mutable_flag', models.IntegerField(default=0)),
                ('param_value', models.CharField(blank=True, max_length=4000, null=True)),
            ],
            options={
                'db_table': 'kpdata_planner_parameter',
            },
        ),
        migrations.CreateModel(
            name='PlannerStatisticalFeaturesPerDay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('scheduled_start_day', models.CharField(max_length=250)),
                ('planner_code', models.CharField(max_length=250)),
                ('mean_on_site_service_time_ratio', models.FloatField(null=True)),
                ('sum_total_travel_minute', models.FloatField(null=True)),
                ('sum_total_service_minute', models.FloatField(null=True)),
                ('sum_total_on_duty_minute', models.FloatField(null=True)),
                ('total_job_count', models.IntegerField(blank=True, null=True)),
                ('total_unplanned_job_count', models.IntegerField(blank=True, null=True)),
                ('total_inplanning_job_count', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'kpdata_stats_features_per_day',
            },
        ),
        migrations.CreateModel(
            name='PlannerStatisticalFeaturesPerWorker',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('scheduled_worker_code', models.CharField(max_length=250)),
                ('planner_code', models.CharField(max_length=250)),
                ('mean_on_site_service_time_ratio', models.FloatField(null=True)),
                ('sum_total_travel_minute', models.FloatField(null=True)),
                ('sum_total_service_minute', models.FloatField(null=True)),
                ('sum_total_on_duty_minute', models.FloatField(null=True)),
                ('total_job_count', models.IntegerField(blank=True, null=True)),
                ('total_unplanned_job_count', models.IntegerField(blank=True, null=True)),
                ('total_inplanning_job_count', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'kpdata_stats_features_per_worker',
            },
        ),
        migrations.CreateModel(
            name='RLPlannerParameter',
            fields=[
                ('planner_code', models.CharField(max_length=250, primary_key=True, serialize=False)),
                ('planner_env_class', models.CharField(max_length=250)),
                ('planner_agent_class', models.CharField(max_length=250)),
                ('allow_overtime', models.BooleanField()),
                ('nbr_of_observed_workers', models.IntegerField()),
                ('nbr_of_days_planning_window', models.IntegerField()),
                ('data_start_day', models.CharField(max_length=250)),
            ],
            options={
                'db_table': 'kpdata_rl_planner_parameter',
            },
        ),
        # Worker entities, absences, skills, and their batch snapshots.
        migrations.CreateModel(
            name='Worker',
            fields=[
                ('worker_code', models.CharField(max_length=250, primary_key=True, serialize=False, verbose_name='Worker Code')),
                ('name', models.CharField(blank=True, max_length=1000, null=True)),
                ('geo_longitude', models.FloatField(null=True)),
                ('geo_latitude', models.FloatField(null=True)),
                ('active', models.IntegerField(blank=True, default=1, null=True)),
                ('level', models.IntegerField(blank=True, default=1, null=True)),
                ('skills', models.CharField(blank=True, max_length=4000, null=True)),
                ('max_conflict_level', models.IntegerField(blank=True, default=1, null=True)),
                ('weekly_working_minutes', models.CharField(blank=True, max_length=950, null=True)),
                ('weekly_max_working_minutes', models.FloatField(blank=True, null=True)),
                ('lunch_break_minutes', models.FloatField(blank=True, null=True)),
                ('served_location_gmm', models.CharField(blank=True, max_length=2000, null=True)),
                ('last_update_by', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_game_code', models.CharField(blank=True, max_length=250, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
            ],
            options={
                'verbose_name': 'Current_Tech',
                'verbose_name_plural': 'Techs',
            },
        ),
        migrations.CreateModel(
            name='WorkerAbsence',
            fields=[
                ('absence_code', models.CharField(max_length=250, primary_key=True, serialize=False)),
                ('absence_type', models.CharField(blank=True, max_length=250, null=True)),
                ('worker_code', models.CharField(blank=True, max_length=250, null=True, verbose_name='Worker Code')),
                ('start_datetime', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('end_datetime', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('geo_longitude', models.FloatField(blank=True, null=True)),
                ('geo_latitude', models.FloatField(blank=True, null=True)),
                ('last_update_by', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_game_code', models.CharField(blank=True, max_length=250, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
            ],
            options={
                'verbose_name': 'Current_Event',
                'verbose_name_plural': 'Events',
            },
        ),
        migrations.CreateModel(
            name='WorkerAbsenceStatus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('game_code', models.CharField(max_length=450)),
                ('absence_code', models.CharField(max_length=250)),
                ('absence_type', models.CharField(blank=True, max_length=250, null=True)),
                ('worker_code', models.CharField(blank=True, max_length=250, null=True, verbose_name='Service Area')),
                ('start_datetime', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('end_datetime', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('geo_longitude', models.FloatField(blank=True, null=True)),
                ('geo_latitude', models.FloatField(blank=True, null=True)),
                ('last_update_by', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_game_code', models.CharField(blank=True, max_length=250, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
            ],
            options={
                'verbose_name': 'Batch_Event',
                'verbose_name_plural': 'Event Histories',
            },
        ),
        migrations.CreateModel(
            name='WorkerSkill',
            fields=[
                ('skill_code', models.CharField(max_length=50, primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
                ('desc', models.CharField(blank=True, max_length=1000, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='WorkerStatus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('worker_code', models.CharField(max_length=250, verbose_name='Service Area')),
                ('game_code', models.CharField(max_length=450)),
                ('planner_code', models.CharField(max_length=450)),
                ('name', models.CharField(blank=True, max_length=150, null=True)),
                ('active', models.IntegerField(blank=True, null=True)),
                ('level', models.IntegerField(blank=True, default=1, null=True)),
                ('skills', models.CharField(blank=True, max_length=4000, null=True)),
                ('geo_longitude', models.FloatField(null=True)),
                ('geo_latitude', models.FloatField(null=True)),
                ('max_conflict_level', models.IntegerField(blank=True, default=1, null=True)),
                ('weekly_working_minutes', models.CharField(blank=True, max_length=950, null=True)),
                ('weekly_max_working_minutes', models.FloatField(blank=True, null=True)),
                ('lunch_break_minutes', models.FloatField(blank=True, null=True)),
                ('last_update_by', models.CharField(blank=True, max_length=250, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
            ],
            options={
                'verbose_name': 'Batch_Tech',
                'verbose_name_plural': 'Tech Batches',
            },
        ),
        migrations.CreateModel(
            name='WorkerLevel',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
                ('worker_skill_levels', models.ManyToManyField(to='kpdata.WorkerSkill')),
            ],
        ),
        migrations.CreateModel(
            name='WorkerAbsenceChanged',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('game_code', models.CharField(max_length=450)),
                ('start_datetime', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('end_datetime', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True)),
                ('approval_status_code', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_by', models.CharField(blank=True, max_length=250, null=True)),
                ('last_update_game_code', models.CharField(blank=True, max_length=250, null=True)),
                ('effective_from_date', models.DateField(blank=True, default=datetime.datetime.now, null=True)),
                ('worker_code', models.ForeignKey(blank=True, db_column='worker_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='absence_worker', to='kpdata.Worker')),
            ],
            options={
                'verbose_name': 'Batch_Event',
                'verbose_name_plural': 'Batch Events',
            },
        ),
        # Constraints and foreign keys added after the referenced models exist.
        migrations.AddConstraint(
            model_name='plannerparameter',
            constraint=models.UniqueConstraint(fields=('planner_code', 'param_name'), name='unique_parameter'),
        ),
        migrations.AddField(
            model_name='job',
            name='actual_worker_code',
            field=models.ForeignKey(blank=True, db_column='actual_worker_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='actual_worker', to='kpdata.Worker'),
        ),
        migrations.AddField(
            model_name='job',
            name='requested_worker_code',
            field=models.ForeignKey(db_column='requested_worker_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='requested_worker', to='kpdata.Worker', verbose_name='Requested Worker'),
        ),
        migrations.AddField(
            model_name='job',
            name='scheduled_worker_code',
            field=models.ForeignKey(blank=True, db_column='scheduled_worker_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='scheduled_worker', to='kpdata.Worker', verbose_name='Scheduled Worker'),
        ),
        # Proxy models: admin-only alternate views over Game; no DB changes.
        migrations.CreateModel(
            name='GameChart',
            fields=[
            ],
            options={
                'verbose_name': 'Visit_Chart',
                'verbose_name_plural': 'Visit Chart',
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('kpdata.game',),
        ),
        migrations.CreateModel(
            name='VueGameChart',
            fields=[
            ],
            options={
                'verbose_name': 'Vue_Chart',
                'verbose_name_plural': 'Vue Visit Chart',
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('kpdata.game',),
        ),
    ]
| 62.540342
| 228
| 0.60663
| 2,656
| 25,579
| 5.605798
| 0.083584
| 0.088119
| 0.088656
| 0.106387
| 0.844449
| 0.824367
| 0.787091
| 0.745651
| 0.72295
| 0.643092
| 0
| 0.017177
| 0.255757
| 25,579
| 408
| 229
| 62.693627
| 0.764931
| 0.001759
| 0
| 0.586035
| 1
| 0
| 0.207034
| 0.086597
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007481
| 0
| 0.017456
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0792deba114bd78e5c37afcbff440c59c0147f34
| 65,867
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/config/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/config/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/config/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY2/PY3 compatibility shim: the auto-generated classes below reference the
# PY2-only names `long` and `__builtin__`.  On PY3 alias the `builtins` module
# to `__builtin__` and `int` to `long` so the same generated body runs on both
# interpreters.
if six.PY3:
    import builtins as __builtin__  # PY3 spelling of the PY2 `__builtin__` module
    long = int  # PY2 `long` was unified into `int` on PY3
elif six.PY2:
    import __builtin__
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path
    /network-instances/network-instance/protocols/protocol/config.
    Each member element of the container is represented as a class
    variable - with a specific YANG type.

    YANG Description: Configuration parameters relating to the routing
    protocol instance
    """

    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__identifier",
        "__name",
        "__enabled",
        "__default_metric",
    )

    _yang_name = "config"
    _pybind_generated_by = "container"

    # Every leaf of this container shares the same YANG namespace, defining
    # module and config-ness; factor the common YANGDynClass keywords out
    # once instead of repeating them for each of the four leaves in
    # __init__, _set_* and _unset_*.
    _LEAF_KWARGS = {
        "is_leaf": True,
        "register_paths": True,
        "namespace": "http://openconfig.net/yang/network-instance",
        "defining_module": "openconfig-network-instance",
        "is_config": True,
    }

    # Accepted protocol identities for the 'identifier' identityref leaf:
    # each identity is valid under its bare name and under both module
    # prefixes ("oc-pol-types:", "oc-pt:").  Built once here instead of
    # repeating the 21-entry literal mapping three times (DRY).
    # NOTE(review): the mapping is shared across all YANGDynClass instances;
    # pyangbind only reads restriction_arg keys, so sharing is safe — confirm
    # if pyangbind internals ever mutate it.
    _IDENTIFIER_RESTRICTION = {
        prefix + ident: {
            "@module": "openconfig-policy-types",
            "@namespace": "http://openconfig.net/yang/policy-types",
        }
        for ident in (
            "BGP",
            "ISIS",
            "OSPF",
            "OSPF3",
            "STATIC",
            "DIRECTLY_CONNECTED",
            "LOCAL_AGGREGATE",
        )
        for prefix in ("", "oc-pol-types:", "oc-pt:")
    }

    # ---- per-leaf YANGDynClass factories ---------------------------------
    # Each factory builds a fresh dynamic class for one leaf.  `*value` is
    # empty when constructing the default instance (__init__/_unset_*) and
    # carries the candidate value when validating in _set_*.

    def _new_identifier(self, *value):
        # 'identifier' leaf: identityref restricted to the protocol
        # identities listed in _IDENTIFIER_RESTRICTION.
        return YANGDynClass(
            *value,
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg=self._IDENTIFIER_RESTRICTION,
            ),
            yang_name="identifier",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            yang_type="identityref",
            **self._LEAF_KWARGS
        )

    def _new_name(self, *value):
        # 'name' leaf: free-form string naming the protocol instance.
        return YANGDynClass(
            *value,
            base=six.text_type,
            yang_name="name",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            yang_type="string",
            **self._LEAF_KWARGS
        )

    def _new_enabled(self, *value):
        # 'enabled' leaf: boolean admin-state flag.
        return YANGDynClass(
            *value,
            base=YANGBool,
            yang_name="enabled",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            yang_type="boolean",
            **self._LEAF_KWARGS
        )

    def _new_default_metric(self, *value):
        # 'default-metric' leaf: uint32 (0..4294967295) RIB metric.
        return YANGDynClass(
            *value,
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            yang_name="default-metric",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            yang_type="uint32",
            **self._LEAF_KWARGS
        )

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Default (unset) dynamic-class instances for each leaf.
        self.__identifier = self._new_identifier()
        self.__name = self._new_name()
        self.__enabled = self._new_enabled()
        self.__default_metric = self._new_default_metric()

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # Copy-construct: the supplied object must expose every element
            # of this container; only changed elements are copied over.
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent when attached to a tree; otherwise return
        # this container's absolute schema path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "config",
            ]

    def _get_identifier(self):
        """
        Getter method for identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/identifier (identityref)

        YANG Description: The protocol identifier for the instance
        """
        return self.__identifier

    def _set_identifier(self, v, load=False):
        """
        Setter method for identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/identifier (identityref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_identifier is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_identifier() directly.

        YANG Description: The protocol identifier for the instance
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = self._new_identifier(v)
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """identifier must be of a type compatible with identityref""",
                    "defined-type": "openconfig-network-instance:identityref",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', 
'@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}},), is_leaf=True, yang_name="identifier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=True)""",
                }
            )

        self.__identifier = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_identifier(self):
        self.__identifier = self._new_identifier()

    def _get_name(self):
        """
        Getter method for name, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/name (string)

        YANG Description: A unique name for the protocol instance
        """
        return self.__name

    def _set_name(self, v, load=False):
        """
        Setter method for name, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/name (string)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_name is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_name() directly.

        YANG Description: A unique name for the protocol instance
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = self._new_name(v)
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """name must be of a type compatible with string""",
                    "defined-type": "string",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=True)""",
                }
            )

        self.__name = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_name(self):
        self.__name = self._new_name()

    def _get_enabled(self):
        """
        Getter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/enabled (boolean)

        YANG Description: A boolean value indicating whether the local protocol
        instance is enabled.
        """
        return self.__enabled

    def _set_enabled(self, v, load=False):
        """
        Setter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/enabled (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_enabled is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_enabled() directly.

        YANG Description: A boolean value indicating whether the local protocol
        instance is enabled.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = self._new_enabled(v)
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """enabled must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=True)""",
                }
            )

        self.__enabled = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_enabled(self):
        self.__enabled = self._new_enabled()

    def _get_default_metric(self):
        """
        Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/default_metric (uint32)

        YANG Description: The default metric within the RIB for entries that are
        installed by this protocol instance. This value may
        be overridden by protocol specific configuration options.
        The lower the metric specified the more preferable the RIB
        entry is to be selected for use within the network instance.
        Where multiple entries have the same metric value then these
        equal cost paths should be treated according to the specified
        ECMP path selection behaviour for the instance
        """
        return self.__default_metric

    def _set_default_metric(self, v, load=False):
        """
        Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/default_metric (uint32)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_default_metric is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_default_metric() directly.

        YANG Description: The default metric within the RIB for entries that are
        installed by this protocol instance. This value may
        be overridden by protocol specific configuration options.
        The lower the metric specified the more preferable the RIB
        entry is to be selected for use within the network instance.
        Where multiple entries have the same metric value then these
        equal cost paths should be treated according to the specified
        ECMP path selection behaviour for the instance
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = self._new_default_metric(v)
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """default_metric must be of a type compatible with uint32""",
                    "defined-type": "uint32",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=True)""",
                }
            )

        self.__default_metric = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_default_metric(self):
        self.__default_metric = self._new_default_metric()

    # Public attribute access goes through properties so that assignment
    # (e.g. `cfg.name = "foo"`) runs the validating setters above.
    identifier = __builtin__.property(_get_identifier, _set_identifier)
    name = __builtin__.property(_get_name, _set_name)
    enabled = __builtin__.property(_get_enabled, _set_enabled)
    default_metric = __builtin__.property(_get_default_metric, _set_default_metric)

    _pyangbind_elements = OrderedDict(
        [
            ("identifier", identifier),
            ("name", name),
            ("enabled", enabled),
            ("default_metric", default_metric),
        ]
    )
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to the routing
protocol instance
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__identifier",
"__name",
"__enabled",
"__default_metric",
)
_yang_name = "config"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__identifier = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
},
),
is_leaf=True,
yang_name="identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=True,
)
self.__name = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="string",
is_config=True,
)
self.__enabled = YANGDynClass(
base=YANGBool,
is_leaf=True,
yang_name="enabled",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=True,
)
self.__default_metric = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"config",
]
def _get_identifier(self):
"""
Getter method for identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/identifier (identityref)
YANG Description: The protocol identifier for the instance
"""
return self.__identifier
def _set_identifier(self, v, load=False):
"""
Setter method for identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/identifier (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_identifier is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_identifier() directly.
YANG Description: The protocol identifier for the instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
},
),
is_leaf=True,
yang_name="identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """identifier must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', 
'@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}},), is_leaf=True, yang_name="identifier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=True)""",
}
)
self.__identifier = t
if hasattr(self, "_set"):
self._set()
def _unset_identifier(self):
    # Reset the 'identifier' leaf back to a fresh, unset YANGDynClass value.
    # NOTE(review): this looks like pyangbind-generated code; the
    # restriction_arg below enumerates every accepted identityref value —
    # each protocol identity in its unprefixed, 'oc-pol-types:'-prefixed
    # and 'oc-pt:'-prefixed spelling. Do not hand-edit the entries.
    self.__identifier = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={
                "BGP": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:BGP": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:BGP": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "ISIS": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:ISIS": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:ISIS": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "OSPF": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:OSPF": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:OSPF": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "OSPF3": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:OSPF3": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:OSPF3": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "STATIC": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:STATIC": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:STATIC": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "DIRECTLY_CONNECTED": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:DIRECTLY_CONNECTED": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:DIRECTLY_CONNECTED": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "LOCAL_AGGREGATE": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pol-types:LOCAL_AGGREGATE": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
                "oc-pt:LOCAL_AGGREGATE": {
                    "@module": "openconfig-policy-types",
                    "@namespace": "http://openconfig.net/yang/policy-types",
                },
            },
        ),
        is_leaf=True,
        yang_name="identifier",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="identityref",
        is_config=True,
    )
def _get_name(self):
    """Return the current value of the ``name`` leaf.

    Maps the YANG variable
    /network_instances/network_instance/protocols/protocol/config/name
    (type: string).

    YANG Description: A unique name for the protocol instance
    """
    # Hand back the private (name-mangled) backing attribute untouched.
    leaf_value = self.__name
    return leaf_value
def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/name (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: A unique name for the protocol instance
    """
    # If the supplied value carries its own unified-type converter, apply
    # it first so YANGDynClass receives a plain value.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a string-typed leaf; YANGDynClass
        # raises TypeError/ValueError when 'v' is not coercible.
        t = YANGDynClass(
            v,
            base=six.text_type,
            is_leaf=True,
            yang_name="name",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="string",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured payload describing the
        # expected generated type (must mirror the constructor call above).
        raise ValueError(
            {
                "error-string": """name must be of a type compatible with string""",
                "defined-type": "string",
                "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=True)""",
            }
        )
    self.__name = t
    # Notify the parent hook, when present, that a child value changed.
    if hasattr(self, "_set"):
        self._set()
def _unset_name(self):
    # Restore the 'name' leaf to an unset, string-typed YANGDynClass value.
    self.__name = YANGDynClass(
        base=six.text_type,
        is_leaf=True,
        yang_name="name",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="string",
        is_config=True,
    )
def _get_enabled(self):
    """Return the current value of the ``enabled`` leaf.

    Maps the YANG variable
    /network_instances/network_instance/protocols/protocol/config/enabled
    (type: boolean).

    YANG Description: A boolean value indicating whether the local
    protocol instance is enabled.
    """
    # Hand back the private (name-mangled) backing attribute untouched.
    leaf_value = self.__enabled
    return leaf_value
def _set_enabled(self, v, load=False):
    """
    Setter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/enabled (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_enabled is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_enabled() directly.

    YANG Description: A boolean value indicating whether the local protocol
    instance is enabled.
    """
    # If the supplied value carries its own unified-type converter, apply
    # it first so YANGDynClass receives a plain value.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a boolean leaf; YANGDynClass raises
        # TypeError/ValueError when 'v' is not coercible to YANGBool.
        t = YANGDynClass(
            v,
            base=YANGBool,
            is_leaf=True,
            yang_name="enabled",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured payload describing the
        # expected generated type (must mirror the constructor call above).
        raise ValueError(
            {
                "error-string": """enabled must be of a type compatible with boolean""",
                "defined-type": "boolean",
                "generated-type": """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=True)""",
            }
        )
    self.__enabled = t
    # Notify the parent hook, when present, that a child value changed.
    if hasattr(self, "_set"):
        self._set()
def _unset_enabled(self):
    # Restore the 'enabled' leaf to an unset, boolean-typed YANGDynClass
    # value.
    self.__enabled = YANGDynClass(
        base=YANGBool,
        is_leaf=True,
        yang_name="enabled",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=True,
    )
def _get_default_metric(self):
    """Return the current value of the ``default_metric`` leaf.

    Maps the YANG variable
    /network_instances/network_instance/protocols/protocol/config/default_metric
    (type: uint32).

    YANG Description: The default metric within the RIB for entries that are
    installed by this protocol instance. This value may
    be overridden by protocol specific configuration options.
    The lower the metric specified the more preferable the RIB
    entry is to be selected for use within the network instance.
    Where multiple entries have the same metric value then these
    equal cost paths should be treated according to the specified
    ECMP path selection behaviour for the instance
    """
    # Hand back the private (name-mangled) backing attribute untouched.
    leaf_value = self.__default_metric
    return leaf_value
def _set_default_metric(self, v, load=False):
    """
    Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config/default_metric (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_default_metric is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_default_metric() directly.

    YANG Description: The default metric within the RIB for entries that are
    installed by this protocol instance. This value may
    be overridden by protocol specific configuration options.
    The lower the metric specified the more preferable the RIB
    entry is to be selected for use within the network instance.
    Where multiple entries have the same metric value then these
    equal cost paths should be treated according to the specified
    ECMP path selection behaviour for the instance
    """
    # If the supplied value carries its own unified-type converter, apply
    # it first so YANGDynClass receives a plain value.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a uint32 (range 0..4294967295) leaf.
        # NOTE(review): 'long' is the Python 2 integer name; presumably the
        # generated module's preamble aliases it on Python 3 — confirm.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="default-metric",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured payload describing the
        # expected generated type (must mirror the constructor call above).
        raise ValueError(
            {
                "error-string": """default_metric must be of a type compatible with uint32""",
                "defined-type": "uint32",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=True)""",
            }
        )
    self.__default_metric = t
    # Notify the parent hook, when present, that a child value changed.
    if hasattr(self, "_set"):
        self._set()
def _unset_default_metric(self):
    # Restore the 'default-metric' leaf to an unset, range-restricted
    # uint32 YANGDynClass value.
    self.__default_metric = YANGDynClass(
        base=RestrictedClassType(
            base_type=long,
            restriction_dict={"range": ["0..4294967295"]},
            int_size=32,
        ),
        is_leaf=True,
        yang_name="default-metric",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint32",
        is_config=True,
    )
# Public attribute views wired to the private getter/setter pairs above.
identifier = __builtin__.property(_get_identifier, _set_identifier)
name = __builtin__.property(_get_name, _set_name)
enabled = __builtin__.property(_get_enabled, _set_enabled)
default_metric = __builtin__.property(_get_default_metric, _set_default_metric)

# Ordered registry of this container's child elements; presumably consumed
# by the pyangbind runtime for (de)serialisation — confirm against the
# library's documentation.
_pyangbind_elements = OrderedDict(
    [
        ("identifier", identifier),
        ("name", name),
        ("enabled", enabled),
        ("default_metric", default_metric),
    ]
)
| 47.903273
| 2,852
| 0.519259
| 5,936
| 65,867
| 5.618598
| 0.038747
| 0.110818
| 0.137923
| 0.155913
| 0.988756
| 0.98225
| 0.98225
| 0.98225
| 0.98225
| 0.98225
| 0
| 0.004198
| 0.356248
| 65,867
| 1,374
| 2,853
| 47.938137
| 0.782369
| 0.118375
| 0
| 0.819757
| 0
| 0.006932
| 0.39865
| 0.124278
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024263
| false
| 0
| 0.012998
| 0
| 0.063258
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
07d06ead0447a6e00cb8210be839089572085d2f
| 18,740
|
py
|
Python
|
tests/test_datatransformer/test_sit_pubtatorAnnotationsInferenceTransformer.py
|
elangovana/kegg-pathway-extractor
|
08e9a28199bb4454e2e1a09c5d833f243f6f5768
|
[
"MIT"
] | 10
|
2019-12-17T01:17:06.000Z
|
2022-02-25T22:08:09.000Z
|
tests/test_datatransformer/test_sit_pubtatorAnnotationsInferenceTransformer.py
|
elangovana/kegg-pathway-extractor
|
08e9a28199bb4454e2e1a09c5d833f243f6f5768
|
[
"MIT"
] | 2
|
2021-03-31T18:40:32.000Z
|
2021-12-13T20:15:20.000Z
|
tests/test_datatransformer/test_sit_pubtatorAnnotationsInferenceTransformer.py
|
elangovana/kegg-pathway-extractor
|
08e9a28199bb4454e2e1a09c5d833f243f6f5768
|
[
"MIT"
] | 2
|
2020-08-25T19:31:33.000Z
|
2021-11-11T15:15:02.000Z
|
import functools
import json
import operator
import os
import tempfile
from io import StringIO
from unittest import TestCase
from unittest.mock import MagicMock
from datatransformer.pubtator_annotations_inference_transformer import PubtatorAnnotationsInferenceTransformer
class TestSitPubtatorAnnotationsInferenceTransformer(TestCase):
    """Integration-style tests for PubtatorAnnotationsInferenceTransformer.

    The transformer's collaborators (``textGeneNormaliser`` and
    ``geneIdConverter``) are replaced with MagicMock doubles, so these
    tests exercise only the parsing/pairing logic of the transformer.
    """

    def test_parse(self):
        """Two pubtator abstracts yield one record per ordered gene pair."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer()
        input = StringIO("""19167335|a|Protein tyrosine phosphatases (PTPs) play a critical role in regulating cellular functions by selectively dephosphorylating their substrates. Here we present 22 human PTP crystal structures that, together with prior structural knowledge, enable a comprehensive analysis of the classical PTP family. Despite their largely conserved fold, surface properties of PTPs are strikingly diverse. A potential secondary substrate-binding pocket is frequently found in phosphatases, and this has implications for both substrate recognition and development of selective inhibitors. Structural comparison identified four diverse catalytic loop (WPD) conformations and suggested a mechanism for loop closure. Enzymatic assays revealed vast differences in PTP catalytic activity and identified PTPD1, PTPD2, and HDPTP as catalytically inert protein phosphatases. We propose a "head-to-toe" dimerization model for RPTPgamma/zeta that is distinct from the "inhibitory wedge" model and that provides a molecular basis for inhibitory regulation. This phosphatome resource gives an expanded insight into intrafamily PTP diversity, catalytic activity, substrate recognition, and autoregulatory self-association.
19167335 167 170 PTP Gene 10076
19167335 779 784 PTPD1 Gene 11099
25260751|a|Unlike the other MAP3Ks, MEKK1 (encoded by Map3k1) contains a PHD motif. To understand the role of this motif, we have created a knockin mutant of mouse Map3k1 (Map3k1(m) (PHD)) with an inactive PHD motif. Map3k1(m) (PHD) ES cells demonstrate that the MEKK1 PHD controls p38 and JNK activation during TGF-b, EGF and microtubule disruption signalling, but does not affect MAPK responses to hyperosmotic stress. Protein microarray profiling identified the adaptor TAB1 as a PHD substrate, and TGF-b- or EGF-stimulated Map3k1(m) (PHD) ES cells exhibit defective non-canonical ubiquitination of MEKK1 and TAB1. The MEKK1 PHD binds and mediates the transfer of Lys63-linked poly-Ub, using the conjugating enzyme UBE2N, onto TAB1 to regulate TAK1 and MAPK activation by TGF-b and EGF. Both the MEKK1 PHD and TAB1 are critical for ES-cell differentiation and tumourigenesis. Map3k1(m) (PHD) (/+) mice exhibit aberrant cardiac tissue, B-cell development, testis and T-cell signalling.
25260751 25 30 MEKK1 Gene 26401
""")
        # Stub the normaliser: always returns the same normalised text plus
        # a fixed annotation list.
        mock_text_nomaliser = MagicMock()
        mock_anno = [{"charOffset": 12, "len": 10, "text": "protein"}]
        mock_text_nomaliser.return_value = "Normalisedtext..", mock_anno
        sut.textGeneNormaliser = mock_text_nomaliser
        # Stub the gene-id converter: maps each gene id G to ["QG"].
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {x: ["Q{}".format(x)]}
        sut.geneIdConverter = mock_gene_converter
        expected = [
            {"pubmedId": "19167335"
                , "participant1Id": "Q10076"
                , "participant1Name": "PTP"
                , "participant2Id": "Q10076"
                , "participant2Name": "PTP",
             "abstract": """Protein tyrosine phosphatases (PTPs) play a critical role in regulating cellular functions by selectively dephosphorylating their substrates. Here we present 22 human PTP crystal structures that, together with prior structural knowledge, enable a comprehensive analysis of the classical PTP family. Despite their largely conserved fold, surface properties of PTPs are strikingly diverse. A potential secondary substrate-binding pocket is frequently found in phosphatases, and this has implications for both substrate recognition and development of selective inhibitors. Structural comparison identified four diverse catalytic loop (WPD) conformations and suggested a mechanism for loop closure. Enzymatic assays revealed vast differences in PTP catalytic activity and identified PTPD1, PTPD2, and HDPTP as catalytically inert protein phosphatases. We propose a "head-to-toe" dimerization model for RPTPgamma/zeta that is distinct from the "inhibitory wedge" model and that provides a molecular basis for inhibitory regulation. This phosphatome resource gives an expanded insight into intrafamily PTP diversity, catalytic activity, substrate recognition, and autoregulatory self-association."""
                ,
             "normalised_abstract": "Normalisedtext.."
                , "normalised_abstract_annotations": mock_anno,
             "annotations": [{'start': '167', 'end': '170', 'name': 'PTP', 'type': 'Gene', 'normalised_id': '10076'},
                             {'start': '779', 'end': '784', 'name': 'PTPD1', 'type': 'Gene', 'normalised_id': '11099'}
                             ]
                , "gene_to_uniprot_map":
                    {'10076': 'Q10076', '11099': 'Q11099'}
             }
            ,
            {"pubmedId": "19167335"
                , "participant1Id": "Q10076"
                , "participant1Name": "PTP"
                , "participant2Id": "Q11099"
                , "participant2Name": "PTPD1",
             "abstract": """Protein tyrosine phosphatases (PTPs) play a critical role in regulating cellular functions by selectively dephosphorylating their substrates. Here we present 22 human PTP crystal structures that, together with prior structural knowledge, enable a comprehensive analysis of the classical PTP family. Despite their largely conserved fold, surface properties of PTPs are strikingly diverse. A potential secondary substrate-binding pocket is frequently found in phosphatases, and this has implications for both substrate recognition and development of selective inhibitors. Structural comparison identified four diverse catalytic loop (WPD) conformations and suggested a mechanism for loop closure. Enzymatic assays revealed vast differences in PTP catalytic activity and identified PTPD1, PTPD2, and HDPTP as catalytically inert protein phosphatases. We propose a "head-to-toe" dimerization model for RPTPgamma/zeta that is distinct from the "inhibitory wedge" model and that provides a molecular basis for inhibitory regulation. This phosphatome resource gives an expanded insight into intrafamily PTP diversity, catalytic activity, substrate recognition, and autoregulatory self-association."""
                ,
             "normalised_abstract": "Normalisedtext.."
                , "normalised_abstract_annotations": mock_anno
                , "annotations": [
                    {'start': '167', 'end': '170', 'name': 'PTP', 'type': 'Gene', 'normalised_id': '10076'},
                    {'start': '779', 'end': '784', 'name': 'PTPD1', 'type': 'Gene', 'normalised_id': '11099'}]
                , "gene_to_uniprot_map":
                    {'10076': 'Q10076', '11099': 'Q11099'}
             },
            {"pubmedId": "19167335"
                , "participant1Id": "Q11099"
                , "participant1Name": "PTPD1"
                , "participant2Id": "Q11099"
                , "participant2Name": "PTPD1"
                ,
             "abstract": """Protein tyrosine phosphatases (PTPs) play a critical role in regulating cellular functions by selectively dephosphorylating their substrates. Here we present 22 human PTP crystal structures that, together with prior structural knowledge, enable a comprehensive analysis of the classical PTP family. Despite their largely conserved fold, surface properties of PTPs are strikingly diverse. A potential secondary substrate-binding pocket is frequently found in phosphatases, and this has implications for both substrate recognition and development of selective inhibitors. Structural comparison identified four diverse catalytic loop (WPD) conformations and suggested a mechanism for loop closure. Enzymatic assays revealed vast differences in PTP catalytic activity and identified PTPD1, PTPD2, and HDPTP as catalytically inert protein phosphatases. We propose a "head-to-toe" dimerization model for RPTPgamma/zeta that is distinct from the "inhibitory wedge" model and that provides a molecular basis for inhibitory regulation. This phosphatome resource gives an expanded insight into intrafamily PTP diversity, catalytic activity, substrate recognition, and autoregulatory self-association."""
                ,
             "normalised_abstract": "Normalisedtext.."
                , "normalised_abstract_annotations": mock_anno
                , "annotations": [
                    {'start': '167', 'end': '170', 'name': 'PTP', 'type': 'Gene', 'normalised_id': '10076'},
                    {'start': '779', 'end': '784', 'name': 'PTPD1', 'type': 'Gene', 'normalised_id': '11099'}]
                , "gene_to_uniprot_map": {'10076': 'Q10076', '11099': 'Q11099'}
             }
            , {"pubmedId": "25260751"
                , "participant1Id": "Q26401"
                , "participant1Name": "MEKK1"
                , "participant2Id": "Q26401"
                , "participant2Name": "MEKK1",
               "abstract": "Unlike the other MAP3Ks, MEKK1 (encoded by Map3k1) contains a PHD motif. To understand the role of this motif, we have created a knockin mutant of mouse Map3k1 (Map3k1(m) (PHD)) with an inactive PHD motif. Map3k1(m) (PHD) ES cells demonstrate that the MEKK1 PHD controls p38 and JNK activation during TGF-b, EGF and microtubule disruption signalling, but does not affect MAPK responses to hyperosmotic stress. Protein microarray profiling identified the adaptor TAB1 as a PHD substrate, and TGF-b- or EGF-stimulated Map3k1(m) (PHD) ES cells exhibit defective non-canonical ubiquitination of MEKK1 and TAB1. The MEKK1 PHD binds and mediates the transfer of Lys63-linked poly-Ub, using the conjugating enzyme UBE2N, onto TAB1 to regulate TAK1 and MAPK activation by TGF-b and EGF. Both the MEKK1 PHD and TAB1 are critical for ES-cell differentiation and tumourigenesis. Map3k1(m) (PHD) (/+) mice exhibit aberrant cardiac tissue, B-cell development, testis and T-cell signalling. "
                ,
               "normalised_abstract": "Normalisedtext.."
                , "normalised_abstract_annotations": mock_anno,
               'annotations': [{'start': '25', 'end': '30', 'name': 'MEKK1', 'type': 'Gene', 'normalised_id': '26401'}]
                , "gene_to_uniprot_map": {'26401': 'Q26401'}
               }]
        # Act
        actual = sut.parse(input)
        # Assert — compare order-independently by (pubmed, p1, p2) key.
        sort_func = lambda x: "{}#{}#{}".format(x["pubmedId"], x["participant1Id"],
                                                x["participant2Id"])
        self.assertEqual(expected, sorted(list(actual), key=sort_func))

    def test_parse_self_relation(self):
        """With filter_self_relation=True, a lone self-pair yields nothing."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer(filter_self_relation=True)
        input = StringIO("""25260751|a|Unlike the other MAP3Ks, MEKK1 (encoded by Map3k1) contains a PHD motif. To understand the role of this motif, we have created a knockin mutant of mouse Map3k1 (Map3k1(m) (PHD)) with an inactive PHD motif. Map3k1(m) (PHD) ES cells demonstrate that the MEKK1 PHD controls p38 and JNK activation during TGF-b, EGF and microtubule disruption signalling, but does not affect MAPK responses to hyperosmotic stress. Protein microarray profiling identified the adaptor TAB1 as a PHD substrate, and TGF-b- or EGF-stimulated Map3k1(m) (PHD) ES cells exhibit defective non-canonical ubiquitination of MEKK1 and TAB1. The MEKK1 PHD binds and mediates the transfer of Lys63-linked poly-Ub, using the conjugating enzyme UBE2N, onto TAB1 to regulate TAK1 and MAPK activation by TGF-b and EGF. Both the MEKK1 PHD and TAB1 are critical for ES-cell differentiation and tumourigenesis. Map3k1(m) (PHD) (/+) mice exhibit aberrant cardiac tissue, B-cell development, testis and T-cell signalling.
25260751 25 30 MEKK1 Gene 26401
""")
        mock_text_nomaliser = MagicMock()
        mock_text_nomaliser.return_value = "Normalisedtext..", [{"charOffset": 12, "len": 10, "text": "protein"}]
        sut.textGeneNormaliser = mock_text_nomaliser
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {x: ["Q{}".format(x)]}
        sut.geneIdConverter = mock_gene_converter
        expected = []
        # Act
        actual = sut.parse(input)
        # Assert
        self.assertEqual(expected, sorted(list(actual)))

    def test_parse_missing_mapping(self):
        """Genes with no uniprot mapping produce no output records."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer(filter_self_relation=True)
        input = StringIO("""19167335|a|Protein tyrosine phosphatases (PTPs) play a critical role in regulating cellular functions by selectively dephosphorylating their substrates. Here we present 22 human PTP crystal structures that, together with prior structural knowledge, enable a comprehensive analysis of the classical PTP family. Despite their largely conserved fold, surface properties of PTPs are strikingly diverse. A potential secondary substrate-binding pocket is frequently found in phosphatases, and this has implications for both substrate recognition and development of selective inhibitors. Structural comparison identified four diverse catalytic loop (WPD) conformations and suggested a mechanism for loop closure. Enzymatic assays revealed vast differences in PTP catalytic activity and identified PTPD1, PTPD2, and HDPTP as catalytically inert protein phosphatases. We propose a "head-to-toe" dimerization model for RPTPgamma/zeta that is distinct from the "inhibitory wedge" model and that provides a molecular basis for inhibitory regulation. This phosphatome resource gives an expanded insight into intrafamily PTP diversity, catalytic activity, substrate recognition, and autoregulatory self-association.
19167335 167 170 PTP Gene 10076
19167335 779 784 PTPD1 Gene 11099
""")
        mock_text_nomaliser = MagicMock()
        mock_text_nomaliser.return_value = "Normalisedtext..", [{"charOffset": 12, "len": 10, "text": "protein"}]
        sut.textGeneNormaliser = mock_text_nomaliser
        # Converter returns an empty mapping for every gene id.
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {}
        sut.geneIdConverter = mock_gene_converter
        expected = []
        # Act
        actual = sut.parse(input)
        # Assert
        actual = list(actual)
        print(actual)
        self.assertEqual(expected, sorted(actual))

    def test_load_file(self):
        """Loading a sample annotation file yields the expected record count."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer()
        input_file = os.path.join(os.path.dirname(__file__), "data_sample_annotation", "sample_1.txt")
        expected_records = 3
        mock_text_nomaliser = MagicMock()
        mock_text_nomaliser.return_value = "Normalisedtext..", [{"charOffset": 12, "len": 10, "text": "protein"}]
        sut.textGeneNormaliser = mock_text_nomaliser
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {x: ["Q{}".format(x)]}
        sut.geneIdConverter = mock_gene_converter
        # Act
        actual = sut.load_file(input_file)
        # Assert
        self.assertEqual(expected_records, len(list(actual)))

    def test_load_directory(self):
        """Loading a directory yields one part per file with all records."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer()
        input_file = os.path.join(os.path.dirname(__file__), "data_sample_annotation")
        expected_parts_len = 2
        expected_total_records = 4
        mock_text_nomaliser = MagicMock()
        mock_text_nomaliser.return_value = "Normalisedtext..", [{"charOffset": 12, "len": 10, "text": "protein"}]
        sut.textGeneNormaliser = mock_text_nomaliser
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {x: ["Q{}".format(x)]}
        sut.geneIdConverter = mock_gene_converter
        # Act
        actual = sut.load_directory(input_file)
        # Assert
        actual_list = list(actual)
        self.assertEqual(expected_parts_len, len(actual_list))
        # Flatten the per-file parts and count the total records.
        self.assertEqual(expected_total_records, len(list(functools.reduce(operator.iconcat, actual_list, []))))

    def test_load_directory_save(self):
        """Saving a loaded directory writes one JSON file per input part."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer()
        input_dir = os.path.join(os.path.dirname(__file__), "data_sample_annotation")
        dest_dir = tempfile.mkdtemp()
        expected_parts_len = 2
        expected_total_records = 4
        mock_text_nomaliser = MagicMock()
        mock_text_nomaliser.return_value = "Normalisedtext..", [{"charOffset": 12, "len": 10, "text": "protein"}]
        sut.textGeneNormaliser = mock_text_nomaliser
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {x: ["Q{}".format(x)]}
        sut.geneIdConverter = mock_gene_converter
        # Act
        actual = sut.load_directory_save(input_dir, dest_dir)
        # Assert
        self.assertEqual(expected_parts_len, len(os.listdir(dest_dir)))
        # Assert that the length of the array within the json file matches
        total_actual = 0
        for f in os.listdir(dest_dir):
            with open(os.path.join(dest_dir, f), "r") as handle:
                total_actual += len(json.load(handle))
        self.assertEqual(expected_total_records, total_actual)

    def test_load_directory_save_ignore_empty_File(self):
        """Files producing no records are not written to the destination."""
        # Arrange
        sut = PubtatorAnnotationsInferenceTransformer(filter_self_relation=True)
        input_dir = os.path.join(os.path.dirname(__file__), "data_sample_annotation")
        dest_dir = tempfile.mkdtemp()
        expected_parts_len = 1
        expected_total_records = 1
        mock_text_nomaliser = MagicMock()
        mock_text_nomaliser.return_value = "Normalisedtext..", [{"charOffset": 12, "len": 10, "text": "protein"}]
        sut.textGeneNormaliser = mock_text_nomaliser
        mock_gene_converter = MagicMock()
        mock_gene_converter.convert.side_effect = lambda x: {x: ["Q{}".format(x)]}
        sut.geneIdConverter = mock_gene_converter
        # Act
        actual = sut.load_directory_save(input_dir, dest_dir)
        # Assert
        self.assertEqual(expected_parts_len, len(os.listdir(dest_dir)))
        # Assert that the length of the array within the json file matches
        total_actual = 0
        for f in os.listdir(dest_dir):
            with open(os.path.join(dest_dir, f), "r") as handle:
                total_actual += len(json.load(handle))
        self.assertEqual(expected_total_records, total_actual)
| 72.635659
| 1,229
| 0.707791
| 2,239
| 18,740
| 5.812416
| 0.140241
| 0.012909
| 0.027432
| 0.028508
| 0.92754
| 0.910404
| 0.894268
| 0.887045
| 0.887045
| 0.881282
| 0
| 0.035941
| 0.210139
| 18,740
| 257
| 1,230
| 72.918288
| 0.843264
| 0.013981
| 0
| 0.625
| 0
| 0.043478
| 0.584535
| 0.011488
| 0
| 0
| 0
| 0
| 0.054348
| 1
| 0.038043
| false
| 0
| 0.048913
| 0
| 0.092391
| 0.005435
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07e3fa8e5c5f1501a7a085c6accbfc1a807f352a
| 77
|
py
|
Python
|
datasets/__init__.py
|
jayantabh/adversarial-attacks
|
4d4df9fe347e0feaf73f86c80926c8edac58724c
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
jayantabh/adversarial-attacks
|
4d4df9fe347e0feaf73f86c80926c8edac58724c
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
jayantabh/adversarial-attacks
|
4d4df9fe347e0feaf73f86c80926c8edac58724c
|
[
"MIT"
] | null | null | null |
from datasets import voc as voc
from datasets import cityscapes as cityscapes
| 38.5
| 45
| 0.857143
| 12
| 77
| 5.5
| 0.5
| 0.363636
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 77
| 2
| 45
| 38.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed1b22f9a06e315813795c9c11c941f5720eeefe
| 7,361
|
py
|
Python
|
src/abaqus/StepOutput/FieldOutputRequestState.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/StepOutput/FieldOutputRequestState.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/StepOutput/FieldOutputRequestState.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
import typing
from abaqusConstants import *
class FieldOutputRequestState:
    """The FieldOutputRequestState object stores the propagating data of a field output request
    current in a step. One instance of this object is created internally by the
    FieldOutputRequest object for each step. The instance is also deleted internally by the
    FieldOutputRequest object.
    The FieldOutputRequestState object has no constructor or methods.

    Attributes
    ----------
    variablesState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request
        variables. Possible values are UNSET, SET, and UNCHANGED.
    frequency: SymbolicConstant
        The SymbolicConstant LAST_INCREMENT or an Int specifying the output frequency in
        increments. The default value is 1.
    frequencyState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request
        frequency. Possible values are UNSET, SET, and UNCHANGED.
    modesState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request modes.
        Possible values are UNSET, SET, and UNCHANGED.
    timeInterval: typing.Union[SymbolicConstant, float]
        The SymbolicConstant EVERY_TIME_INCREMENT or a Float specifying the time interval at
        which the output states are to be written. The default value is EVERY_TIME_INCREMENT.
    timeIntervalState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request time
        interval. Possible values are UNSET, SET, and UNCHANGED.
    numIntervals: int
        An Int specifying the number of intervals during the step at which output database
        states are to be written. The default value is 20.
    numIntervalsState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request.
        Possible values are UNSET, SET, and UNCHANGED.
    timeMarks: Boolean
        A Boolean specifying when to write results to the output database. The default value is
        OFF.
    timeMarksState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request.
        Possible values are UNSET, SET, and UNCHANGED.
    timePointState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the field output request.
        Possible values are UNSET, SET, and UNCHANGED.
    status: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the FieldOutputRequestState
        object. Possible values are NOT_YET_ACTIVE, CREATED, PROPAGATED, MODIFIED, DEACTIVATED,
        NO_LONGER_ACTIVE, TYPE_NOT_APPLICABLE, and INSTANCE_NOT_APPLICABLE.
    variables: SymbolicConstant
        A tuple of Strings specifying output request variable or component names, or the
        SymbolicConstant PRESELECT or ALL. PRESELECT represents all default output variables for
        the given step. ALL represents all valid output variables.
    modes: SymbolicConstant
        The SymbolicConstant ALL or a tuple of Ints specifying a list of eigenmodes for which
        output is desired. The default value is ALL.
    timePoint: str
        A String specifying the name of a time point object used to determine which output
        database states are to be written. The default value is an empty string.
    frequencyType: str
        A String specifying a read-only SymbolicConstant describing which type of frequency of
        output is used. Possible values are FREQUENCY, NUMBER_INTERVALS, TIME_INTERVAL,
        TIME_POINT and MODES. The default value depends on the procedure. The default value is
        an empty string.

    Notes
    -----
    This object can be accessed by:

    .. code-block:: python

        import step
        mdb.models[name].steps[name].fieldOutputRequestState[name]
    """

    # A SymbolicConstant specifying the propagation state of the field output request
    # variables. Possible values are UNSET, SET, and UNCHANGED.
    variablesState: SymbolicConstant = None

    # The SymbolicConstant LAST_INCREMENT or an Int specifying the output frequency in
    # increments. The default value is 1.
    frequency: SymbolicConstant = 1

    # A SymbolicConstant specifying the propagation state of the field output request
    # frequency. Possible values are UNSET, SET, and UNCHANGED.
    frequencyState: SymbolicConstant = None

    # A SymbolicConstant specifying the propagation state of the field output request modes.
    # Possible values are UNSET, SET, and UNCHANGED.
    modesState: SymbolicConstant = None

    # The SymbolicConstant EVERY_TIME_INCREMENT or a Float specifying the time interval at
    # which the output states are to be written. The default value is EVERY_TIME_INCREMENT.
    timeInterval: typing.Union[SymbolicConstant, float] = EVERY_TIME_INCREMENT

    # A SymbolicConstant specifying the propagation state of the field output request time
    # interval. Possible values are UNSET, SET, and UNCHANGED.
    timeIntervalState: SymbolicConstant = None

    # An Int specifying the number of intervals during the step at which output database
    # states are to be written. The default value is 20.
    numIntervals: int = 20

    # A SymbolicConstant specifying the propagation state of the field output request.
    # Possible values are UNSET, SET, and UNCHANGED.
    numIntervalsState: SymbolicConstant = None

    # A Boolean specifying when to write results to the output database. The default value is
    # OFF.
    timeMarks: Boolean = OFF

    # A SymbolicConstant specifying the propagation state of the field output request.
    # Possible values are UNSET, SET, and UNCHANGED.
    timeMarksState: SymbolicConstant = None

    # A SymbolicConstant specifying the propagation state of the field output request.
    # Possible values are UNSET, SET, and UNCHANGED.
    timePointState: SymbolicConstant = None

    # A SymbolicConstant specifying the propagation state of the FieldOutputRequestState
    # object. Possible values are NOT_YET_ACTIVE, CREATED, PROPAGATED, MODIFIED, DEACTIVATED,
    # NO_LONGER_ACTIVE, TYPE_NOT_APPLICABLE, and INSTANCE_NOT_APPLICABLE.
    status: SymbolicConstant = None

    # A tuple of Strings specifying output request variable or component names, or the
    # SymbolicConstant PRESELECT or ALL. PRESELECT represents all default output variables for
    # the given step. ALL represents all valid output variables.
    variables: SymbolicConstant = None

    # The SymbolicConstant ALL or a tuple of Ints specifying a list of eigenmodes for which
    # output is desired. The default value is ALL.
    modes: SymbolicConstant = ALL

    # A String specifying the name of a time point object used to determine which output
    # database states are to be written. The default value is an empty string.
    timePoint: str = ''

    # A String specifying a read-only SymbolicConstant describing which type of frequency of
    # output is used. Possible values are FREQUENCY, NUMBER_INTERVALS, TIME_INTERVAL,
    # TIME_POINT and MODES. The default value depends on the procedure. The default value is
    # an empty string.
    frequencyType: str = ''
| 50.417808
| 104
| 0.744464
| 908
| 7,361
| 5.997797
| 0.155286
| 0.05729
| 0.079324
| 0.088138
| 0.837679
| 0.803342
| 0.803342
| 0.803342
| 0.791957
| 0.777084
| 0
| 0.001566
| 0.219264
| 7,361
| 145
| 105
| 50.765517
| 0.946059
| 0.832496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.105263
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
ed214d33cdfd1de05820590f338185ee4665fc39
| 86
|
py
|
Python
|
CH_03_pythonic_syntax/T_29_circular_imports_b.py
|
mastering-python/code_2
|
441af8b67402c8216c482cca7c002e1d7f0f1baa
|
[
"MIT"
] | null | null | null |
CH_03_pythonic_syntax/T_29_circular_imports_b.py
|
mastering-python/code_2
|
441af8b67402c8216c482cca7c002e1d7f0f1baa
|
[
"MIT"
] | null | null | null |
CH_03_pythonic_syntax/T_29_circular_imports_b.py
|
mastering-python/code_2
|
441af8b67402c8216c482cca7c002e1d7f0f1baa
|
[
"MIT"
] | null | null | null |
import T_29_circular_imports_a
class FileB(T_29_circular_imports_a.FileA):
    """Subclass of ``FileA`` defined in ``T_29_circular_imports_a``.

    The module names suggest this is the second half of a circular-import
    example: this module imports module "a" and extends its class.
    """
    pass
| 14.333333
| 43
| 0.825581
| 15
| 86
| 4.2
| 0.666667
| 0.095238
| 0.349206
| 0.571429
| 0.603175
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 0.127907
| 86
| 5
| 44
| 17.2
| 0.786667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
ed6141bdada557d90c024742efcc157a7cf34f3e
| 373
|
py
|
Python
|
coordination_network_toolkit/__init__.py
|
QUT-Digital-Observatory/coordination-network-toolkit
|
6d60fcb4b7b654af2021fb14db100597722f00d7
|
[
"MIT"
] | 29
|
2020-12-30T15:40:51.000Z
|
2022-03-31T11:47:25.000Z
|
coordination_network_toolkit/__init__.py
|
QUT-Digital-Observatory/coordination-network-toolkit
|
6d60fcb4b7b654af2021fb14db100597722f00d7
|
[
"MIT"
] | 33
|
2020-11-16T02:22:20.000Z
|
2022-03-25T04:25:00.000Z
|
coordination_network_toolkit/__init__.py
|
QUT-Digital-Observatory/coordination-network-toolkit
|
6d60fcb4b7b654af2021fb14db100597722f00d7
|
[
"MIT"
] | 4
|
2021-03-09T20:25:19.000Z
|
2022-03-31T10:25:05.000Z
|
from coordination_network_toolkit.__main__ import main
import coordination_network_toolkit.compute_networks
import coordination_network_toolkit.database
import coordination_network_toolkit.graph
import coordination_network_toolkit.output
import coordination_network_toolkit.preprocess
import coordination_network_toolkit.similarity
import coordination_network_toolkit.urls
| 41.444444
| 54
| 0.930295
| 43
| 373
| 7.581395
| 0.325581
| 0.466258
| 0.638037
| 0.687117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048257
| 373
| 8
| 55
| 46.625
| 0.91831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed79ccc76605261c8132bbe62686159c8f5bd5c2
| 6,841
|
py
|
Python
|
tests/nnapi/specs/skip/V1_2/box_with_nms_limit_gaussian.mod.py
|
bogus-sudo/ONE-1
|
7052a817eff661ec2854ed2e7ee0de5e8ba82b55
|
[
"Apache-2.0"
] | 255
|
2020-05-22T07:45:29.000Z
|
2022-03-29T23:58:22.000Z
|
tests/nnapi/specs/skip/V1_2/box_with_nms_limit_gaussian.mod.py
|
bogus-sudo/ONE-1
|
7052a817eff661ec2854ed2e7ee0de5e8ba82b55
|
[
"Apache-2.0"
] | 5,102
|
2020-05-22T07:48:33.000Z
|
2022-03-31T23:43:39.000Z
|
test/cts/tool/CTSConverter/src/nn/specs/V1_2/box_with_nms_limit_gaussian.mod.py
|
ibelem/webml-polyfill
|
aaf1ba4f5357eaf6e89bf9990f5bdfb543cd2bc2
|
[
"Apache-2.0"
] | 120
|
2020-05-22T07:51:08.000Z
|
2022-02-16T19:08:05.000Z
|
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# TEST 1: BOX_WITH_NMS_LIMIT, score_threshold = 0.3, sigma = 0.5, max_detections = -1
# Inputs: per-box class scores (19 boxes x 3 classes), candidate ROIs
# (12 coordinates per box, i.e. one 4-value box per class), and the batch
# index of each box.  Outputs are the 18 surviving detections.
i1 = Input("scores", "TENSOR_FLOAT32", "{19, 3}") # scores
i2 = Input("roi", "TENSOR_FLOAT32", "{19, 12}") # roi
i3 = Input("batchSplit", "TENSOR_INT32", "{19}") # batchSplit
o1 = Output("scoresOut", "TENSOR_FLOAT32", "{18}") # scores out
o2 = Output("roiOut", "TENSOR_FLOAT32", "{18, 4}") # roi out
o3 = Output("classesOut", "TENSOR_INT32", "{18}") # classes out
o4 = Output("batchSplitOut", "TENSOR_INT32", "{18}") # batch split out
# NOTE(review): scalar operands 0.3, -1, 2, 0.4, 0.5, 0.3 presumably map to
# score threshold, max detections, NMS kernel, and the NMS parameters --
# confirm the exact order against the BOX_WITH_NMS_LIMIT operator spec.
model = Model().Operation("BOX_WITH_NMS_LIMIT", i1, i2, i3, 0.3, -1, 2, 0.4, 0.5, 0.3).To(o1, o2, o3, o4)
# Quantized variant of the same test: scores as QUANT8_ASYMM, boxes as
# QUANT16_ASYMM with the given scales/zero points.
quant8 = DataTypeConverter().Identify({
    i1: ("TENSOR_QUANT8_ASYMM", 0.01, 0),
    i2: ("TENSOR_QUANT16_ASYMM", 0.125, 0),
    o1: ("TENSOR_QUANT8_ASYMM", 0.01, 0),
    o2: ("TENSOR_QUANT16_ASYMM", 0.125, 0)
})
input0 = {
    i1: [ # scores
        0.90, 0.95, 0.75,
        0.80, 0.70, 0.85,
        0.60, 0.90, 0.95,
        0.90, 0.65, 0.90,
        0.80, 0.85, 0.80,
        0.60, 0.60, 0.20,
        0.60, 0.80, 0.40,
        0.90, 0.55, 0.60,
        0.90, 0.75, 0.70,
        0.80, 0.70, 0.85,
        0.90, 0.95, 0.75,
        0.80, 0.85, 0.80,
        0.60, 0.90, 0.95,
        0.60, 0.60, 0.20,
        0.50, 0.90, 0.80,
        0.90, 0.75, 0.70,
        0.90, 0.65, 0.90,
        0.90, 0.55, 0.60,
        0.60, 0.80, 0.40
    ],
    i2: [ # roi
        1, 1, 10, 10, 0, 0, 10, 10, 0, 0, 10, 10,
        2, 2, 11, 11, 1, 1, 11, 11, 1, 1, 11, 11,
        3, 3, 12, 12, 2, 2, 12, 12, 2, 2, 12, 12,
        4, 4, 13, 13, 3, 3, 13, 13, 3, 3, 13, 13,
        5, 5, 14, 14, 4, 4, 14, 14, 4, 4, 14, 14,
        6, 6, 15, 15, 5, 5, 15, 15, 5, 5, 15, 15,
        7, 7, 16, 16, 6, 6, 16, 16, 6, 6, 16, 16,
        8, 8, 17, 17, 7, 7, 17, 17, 7, 7, 17, 17,
        9, 9, 18, 18, 8, 8, 18, 18, 8, 8, 18, 18,
        2, 2, 11, 11, 2, 2, 12, 12, 2, 2, 12, 12,
        1, 1, 10, 10, 1, 1, 11, 11, 1, 1, 11, 11,
        5, 5, 14, 14, 5, 5, 15, 15, 5, 5, 15, 15,
        3, 3, 12, 12, 3, 3, 13, 13, 3, 3, 13, 13,
        6, 6, 15, 15, 6, 6, 16, 16, 6, 6, 16, 16,
        0, 0, 1, 1, 0, 0, 2, 2, 0, 0, 2, 2,
        9, 9, 18, 18, 9, 9, 19, 19, 9, 9, 19, 19,
        4, 4, 13, 13, 4, 4, 14, 14, 4, 4, 14, 14,
        8, 8, 17, 17, 8, 8, 18, 18, 8, 8, 18, 18,
        7, 7, 16, 16, 7, 7, 17, 17, 7, 7, 17, 17
    ],
    i3: [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] # batch split
}
output0 = {
    o1: [
        0.95, 0.7879927, 0.52485234, 0.47400165, 0.95, 0.6894936, 0.4812244, 0.42367333,
        0.95, 0.89983034, 0.7879927, 0.52485234, 0.47400165, 0.95, 0.8, 0.6894936, 0.4811337, 0.42367333
    ],
    o2: [
        0, 0, 10, 10,
        6, 6, 16, 16,
        2, 2, 12, 12,
        8, 8, 18, 18,
        2, 2, 12, 12,
        8, 8, 18, 18,
        0, 0, 10, 10,
        4, 4, 14, 14,
        1, 1, 11, 11,
        0, 0, 2, 2,
        7, 7, 17, 17,
        3, 3, 13, 13,
        9, 9, 19, 19,
        3, 3, 13, 13,
        0, 0, 2, 2,
        9, 9, 19, 19,
        1, 1, 11, 11,
        5, 5, 15, 15
    ],
    o3: [1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2],
    o4: [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
}
# Run the float example plus relaxed-precision, float16 and quant8 variations.
Example((input0, output0)).AddVariations("relaxed", "float16", quant8)
# TEST 2: BOX_WITH_NMS_LIMIT, score_threshold = 0.3, nms_threshold = 0.4, max_detections = 5
# Same input boxes/scores as TEST 1, but detections are capped at 5 per batch
# (10 outputs total) and the quantized scores use zero point 128.
i1 = Input("scores", "TENSOR_FLOAT32", "{19, 3}") # scores
i2 = Input("roi", "TENSOR_FLOAT32", "{19, 12}") # roi
i3 = Input("batchSplit", "TENSOR_INT32", "{19}") # batchSplit
o1 = Output("scoresOut", "TENSOR_FLOAT32", "{10}") # scores out
o2 = Output("roiOut", "TENSOR_FLOAT32", "{10, 4}") # roi out
o3 = Output("classesOut", "TENSOR_INT32", "{10}") # classes out
o4 = Output("batchSplitOut", "TENSOR_INT32", "{10}") # batch split out
model = Model().Operation("BOX_WITH_NMS_LIMIT", i1, i2, i3, 0.3, 5, 2, 0.4, 0.5, 0.3).To(o1, o2, o3, o4)
# Quantized variant: same layout as TEST 1 but scores quantized around 128.
quant8 = DataTypeConverter().Identify({
    i1: ("TENSOR_QUANT8_ASYMM", 0.01, 128),
    i2: ("TENSOR_QUANT16_ASYMM", 0.125, 0),
    o1: ("TENSOR_QUANT8_ASYMM", 0.01, 128),
    o2: ("TENSOR_QUANT16_ASYMM", 0.125, 0)
})
input0 = {
    i1: [ # scores
        0.90, 0.95, 0.75,
        0.80, 0.70, 0.85,
        0.60, 0.90, 0.95,
        0.90, 0.65, 0.90,
        0.80, 0.85, 0.80,
        0.60, 0.60, 0.20,
        0.60, 0.80, 0.40,
        0.90, 0.55, 0.60,
        0.90, 0.75, 0.70,
        0.80, 0.70, 0.85,
        0.90, 0.95, 0.75,
        0.80, 0.85, 0.80,
        0.60, 0.90, 0.95,
        0.60, 0.60, 0.20,
        0.50, 0.90, 0.80,
        0.90, 0.75, 0.70,
        0.90, 0.65, 0.90,
        0.90, 0.55, 0.60,
        0.60, 0.80, 0.40
    ],
    i2: [ # roi
        1, 1, 10, 10, 0, 0, 10, 10, 0, 0, 10, 10,
        2, 2, 11, 11, 1, 1, 11, 11, 1, 1, 11, 11,
        3, 3, 12, 12, 2, 2, 12, 12, 2, 2, 12, 12,
        4, 4, 13, 13, 3, 3, 13, 13, 3, 3, 13, 13,
        5, 5, 14, 14, 4, 4, 14, 14, 4, 4, 14, 14,
        6, 6, 15, 15, 5, 5, 15, 15, 5, 5, 15, 15,
        7, 7, 16, 16, 6, 6, 16, 16, 6, 6, 16, 16,
        8, 8, 17, 17, 7, 7, 17, 17, 7, 7, 17, 17,
        9, 9, 18, 18, 8, 8, 18, 18, 8, 8, 18, 18,
        2, 2, 11, 11, 2, 2, 12, 12, 2, 2, 12, 12,
        1, 1, 10, 10, 1, 1, 11, 11, 1, 1, 11, 11,
        5, 5, 14, 14, 5, 5, 15, 15, 5, 5, 15, 15,
        3, 3, 12, 12, 3, 3, 13, 13, 3, 3, 13, 13,
        6, 6, 15, 15, 6, 6, 16, 16, 6, 6, 16, 16,
        0, 0, 1, 1, 0, 0, 2, 2, 0, 0, 2, 2,
        9, 9, 18, 18, 9, 9, 19, 19, 9, 9, 19, 19,
        4, 4, 13, 13, 4, 4, 14, 14, 4, 4, 14, 14,
        8, 8, 17, 17, 8, 8, 18, 18, 8, 8, 18, 18,
        7, 7, 16, 16, 7, 7, 17, 17, 7, 7, 17, 17
    ],
    i3: [1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3] # batch split
}
output0 = {
    o1: [
        0.95, 0.7879927, 0.52485234, 0.95, 0.6894936,
        0.95, 0.89983034, 0.7879927, 0.95, 0.8
    ],
    o2: [
        0, 0, 10, 10,
        6, 6, 16, 16,
        2, 2, 12, 12,
        2, 2, 12, 12,
        8, 8, 18, 18,
        1, 1, 11, 11,
        0, 0, 2, 2,
        7, 7, 17, 17,
        3, 3, 13, 13,
        0, 0, 2, 2,
    ],
    o3: [1, 1, 1, 2, 2, 1, 1, 1, 2, 2],
    o4: [1, 1, 1, 1, 1, 3, 3, 3, 3, 3],
}
# Run the float example plus relaxed-precision, float16 and quant8 variations.
Example((input0, output0)).AddVariations("relaxed", "float16", quant8)
| 34.376884
| 105
| 0.466306
| 1,339
| 6,841
| 2.347274
| 0.113518
| 0.036907
| 0.031499
| 0.031817
| 0.804327
| 0.7951
| 0.789373
| 0.695514
| 0.633471
| 0.617245
| 0
| 0.361063
| 0.323491
| 6,841
| 198
| 106
| 34.550505
| 0.318064
| 0.136237
| 0
| 0.821429
| 0
| 0
| 0.101175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed85cc7b46ba1c4b58eb9b6c04a113875df7618c
| 14,056
|
py
|
Python
|
sdk/python/pulumi_openstack/keymanager/_inputs.py
|
ederst/pulumi-openstack
|
e11af3f9ade3bc0de8b3feb9db5a9e86ad5ba989
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_openstack/keymanager/_inputs.py
|
ederst/pulumi-openstack
|
e11af3f9ade3bc0de8b3feb9db5a9e86ad5ba989
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-03-11T09:01:32.000Z
|
2021-03-11T09:01:32.000Z
|
sdk/python/pulumi_openstack/keymanager/_inputs.py
|
ederst/pulumi-openstack
|
e11af3f9ade3bc0de8b3feb9db5a9e86ad5ba989
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
# Public names re-exported via ``from ... import *``: the input-argument
# classes defined in this module.
__all__ = [
    'ContainerV1AclArgs',
    'ContainerV1AclReadArgs',
    'ContainerV1ConsumerArgs',
    'ContainerV1SecretRefArgs',
    'OrderV1MetaArgs',
    'SecretV1AclArgs',
    'SecretV1AclReadArgs',
]
@pulumi.input_type
class ContainerV1AclArgs:
    """ACL arguments for a key-manager container; currently only a ``read`` rule."""

    def __init__(__self__, *,
                 read: Optional[pulumi.Input['ContainerV1AclReadArgs']] = None):
        # Only store the rule when the caller actually supplied one.
        if read is not None:
            pulumi.set(__self__, "read", read)

    @property
    @pulumi.getter
    def read(self) -> Optional[pulumi.Input['ContainerV1AclReadArgs']]:
        return pulumi.get(self, "read")

    @read.setter
    def read(self, value: Optional[pulumi.Input['ContainerV1AclReadArgs']]):
        pulumi.set(self, "read", value)
@pulumi.input_type
class ContainerV1AclReadArgs:
    """The ``read`` ACL rule of a key-manager container."""

    def __init__(__self__, *,
                 created_at: Optional[pulumi.Input[str]] = None,
                 project_access: Optional[pulumi.Input[bool]] = None,
                 updated_at: Optional[pulumi.Input[str]] = None,
                 users: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] created_at: The date the container ACL was created.
        :param pulumi.Input[bool] project_access: Whether the container is accessible project wide.
               Defaults to `true`.
        :param pulumi.Input[str] updated_at: The date the container ACL was last updated.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] users: The list of user IDs, which are allowed to access the
               container, when `project_access` is set to `false`.
        """
        # Unset arguments are omitted entirely rather than stored as None.
        if created_at is not None:
            pulumi.set(__self__, "created_at", created_at)
        if project_access is not None:
            pulumi.set(__self__, "project_access", project_access)
        if updated_at is not None:
            pulumi.set(__self__, "updated_at", updated_at)
        if users is not None:
            pulumi.set(__self__, "users", users)

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> Optional[pulumi.Input[str]]:
        """
        The date the container ACL was created.
        """
        return pulumi.get(self, "created_at")

    @created_at.setter
    def created_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "created_at", value)

    @property
    @pulumi.getter(name="projectAccess")
    def project_access(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether the container is accessible project wide.
        Defaults to `true`.
        """
        return pulumi.get(self, "project_access")

    @project_access.setter
    def project_access(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "project_access", value)

    @property
    @pulumi.getter(name="updatedAt")
    def updated_at(self) -> Optional[pulumi.Input[str]]:
        """
        The date the container ACL was last updated.
        """
        return pulumi.get(self, "updated_at")

    @updated_at.setter
    def updated_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "updated_at", value)

    @property
    @pulumi.getter
    def users(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The list of user IDs, which are allowed to access the
        container, when `project_access` is set to `false`.
        """
        return pulumi.get(self, "users")

    @users.setter
    def users(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "users", value)
@pulumi.input_type
class ContainerV1ConsumerArgs:
    """A consumer (name/URL pair) registered on a key-manager container."""

    def __init__(__self__, *,
                 name: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] name: The name of the secret reference. The reference names must correspond the container type, more details are available [here](https://docs.openstack.org/barbican/stein/api/reference/containers.html).
        :param pulumi.Input[str] url: The consumer URL.
        """
        # Unset arguments are omitted entirely rather than stored as None.
        if name is not None:
            pulumi.set(__self__, "name", name)
        if url is not None:
            pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the secret reference. The reference names must correspond the container type, more details are available [here](https://docs.openstack.org/barbican/stein/api/reference/containers.html).
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        The consumer URL.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class ContainerV1SecretRefArgs:
    """A reference to a secret stored in a key-manager container.

    ``secret_ref`` is required; ``name`` is optional.
    """

    def __init__(__self__, *,
                 secret_ref: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] secret_ref: The secret reference / where to find the secret, URL.
        :param pulumi.Input[str] name: The name of the secret reference. The reference names must correspond the container type, more details are available [here](https://docs.openstack.org/barbican/stein/api/reference/containers.html).
        """
        pulumi.set(__self__, "secret_ref", secret_ref)
        # The optional name is omitted entirely rather than stored as None.
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="secretRef")
    def secret_ref(self) -> pulumi.Input[str]:
        """
        The secret reference / where to find the secret, URL.
        """
        return pulumi.get(self, "secret_ref")

    @secret_ref.setter
    def secret_ref(self, value: pulumi.Input[str]):
        pulumi.set(self, "secret_ref", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the secret reference. The reference names must correspond the container type, more details are available [here](https://docs.openstack.org/barbican/stein/api/reference/containers.html).
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class OrderV1MetaArgs:
    """Metadata describing a key-manager order (what key material to generate).

    ``algorithm`` and ``bit_length`` are required; the rest are optional.
    """

    def __init__(__self__, *,
                 algorithm: pulumi.Input[str],
                 bit_length: pulumi.Input[int],
                 expiration: Optional[pulumi.Input[str]] = None,
                 mode: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 payload_content_type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] algorithm: Algorithm to use for key generation.
        :param pulumi.Input[int] bit_length: - Bit length of key to be generated.
        :param pulumi.Input[str] expiration: This is a UTC timestamp in ISO 8601 format YYYY-MM-DDTHH:MM:SSZ. If set, the secret will not be available after this time.
        :param pulumi.Input[str] mode: The mode to use for key generation.
        :param pulumi.Input[str] name: The name of the secret set by the user.
        :param pulumi.Input[str] payload_content_type: The media type for the content of the secrets payload. Must be one of `text/plain`, `text/plain;charset=utf-8`, `text/plain; charset=utf-8`, `application/octet-stream`, `application/pkcs8`.
        """
        pulumi.set(__self__, "algorithm", algorithm)
        pulumi.set(__self__, "bit_length", bit_length)
        # Optional arguments are omitted entirely rather than stored as None.
        if expiration is not None:
            pulumi.set(__self__, "expiration", expiration)
        if mode is not None:
            pulumi.set(__self__, "mode", mode)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if payload_content_type is not None:
            pulumi.set(__self__, "payload_content_type", payload_content_type)

    @property
    @pulumi.getter
    def algorithm(self) -> pulumi.Input[str]:
        """
        Algorithm to use for key generation.
        """
        return pulumi.get(self, "algorithm")

    @algorithm.setter
    def algorithm(self, value: pulumi.Input[str]):
        pulumi.set(self, "algorithm", value)

    @property
    @pulumi.getter(name="bitLength")
    def bit_length(self) -> pulumi.Input[int]:
        """
        - Bit length of key to be generated.
        """
        return pulumi.get(self, "bit_length")

    @bit_length.setter
    def bit_length(self, value: pulumi.Input[int]):
        pulumi.set(self, "bit_length", value)

    @property
    @pulumi.getter
    def expiration(self) -> Optional[pulumi.Input[str]]:
        """
        This is a UTC timestamp in ISO 8601 format YYYY-MM-DDTHH:MM:SSZ. If set, the secret will not be available after this time.
        """
        return pulumi.get(self, "expiration")

    @expiration.setter
    def expiration(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expiration", value)

    @property
    @pulumi.getter
    def mode(self) -> Optional[pulumi.Input[str]]:
        """
        The mode to use for key generation.
        """
        return pulumi.get(self, "mode")

    @mode.setter
    def mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mode", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the secret set by the user.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="payloadContentType")
    def payload_content_type(self) -> Optional[pulumi.Input[str]]:
        """
        The media type for the content of the secrets payload. Must be one of `text/plain`, `text/plain;charset=utf-8`, `text/plain; charset=utf-8`, `application/octet-stream`, `application/pkcs8`.
        """
        return pulumi.get(self, "payload_content_type")

    @payload_content_type.setter
    def payload_content_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "payload_content_type", value)
@pulumi.input_type
class SecretV1AclArgs:
    """ACL arguments for a key-manager secret; currently only a ``read`` rule."""

    def __init__(__self__, *,
                 read: Optional[pulumi.Input['SecretV1AclReadArgs']] = None):
        # Only store the rule when the caller actually supplied one.
        if read is not None:
            pulumi.set(__self__, "read", read)

    @property
    @pulumi.getter
    def read(self) -> Optional[pulumi.Input['SecretV1AclReadArgs']]:
        return pulumi.get(self, "read")

    @read.setter
    def read(self, value: Optional[pulumi.Input['SecretV1AclReadArgs']]):
        pulumi.set(self, "read", value)
@pulumi.input_type
class SecretV1AclReadArgs:
    """The ``read`` ACL rule of a key-manager secret."""

    def __init__(__self__, *,
                 created_at: Optional[pulumi.Input[str]] = None,
                 project_access: Optional[pulumi.Input[bool]] = None,
                 updated_at: Optional[pulumi.Input[str]] = None,
                 users: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] created_at: The date the secret ACL was created.
        :param pulumi.Input[bool] project_access: Whether the secret is accessible project wide.
               Defaults to `true`.
        :param pulumi.Input[str] updated_at: The date the secret ACL was last updated.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] users: The list of user IDs, which are allowed to access the
               secret, when `project_access` is set to `false`.
        """
        # Unset arguments are omitted entirely rather than stored as None.
        if created_at is not None:
            pulumi.set(__self__, "created_at", created_at)
        if project_access is not None:
            pulumi.set(__self__, "project_access", project_access)
        if updated_at is not None:
            pulumi.set(__self__, "updated_at", updated_at)
        if users is not None:
            pulumi.set(__self__, "users", users)

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> Optional[pulumi.Input[str]]:
        """
        The date the secret ACL was created.
        """
        return pulumi.get(self, "created_at")

    @created_at.setter
    def created_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "created_at", value)

    @property
    @pulumi.getter(name="projectAccess")
    def project_access(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether the secret is accessible project wide.
        Defaults to `true`.
        """
        return pulumi.get(self, "project_access")

    @project_access.setter
    def project_access(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "project_access", value)

    @property
    @pulumi.getter(name="updatedAt")
    def updated_at(self) -> Optional[pulumi.Input[str]]:
        """
        The date the secret ACL was last updated.
        """
        return pulumi.get(self, "updated_at")

    @updated_at.setter
    def updated_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "updated_at", value)

    @property
    @pulumi.getter
    def users(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The list of user IDs, which are allowed to access the
        secret, when `project_access` is set to `false`.
        """
        return pulumi.get(self, "users")

    @users.setter
    def users(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "users", value)
| 36.699739
| 244
| 0.632186
| 1,722
| 14,056
| 5.014518
| 0.092334
| 0.118471
| 0.097279
| 0.084076
| 0.825478
| 0.770353
| 0.739664
| 0.712913
| 0.69612
| 0.631036
| 0
| 0.003305
| 0.246585
| 14,056
| 382
| 245
| 36.795812
| 0.812087
| 0.268924
| 0
| 0.637931
| 1
| 0
| 0.088678
| 0.014101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.202586
| false
| 0
| 0.021552
| 0.008621
| 0.340517
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c2527f3a8f0bc56dad86fc5d1323d3855e7be0a
| 188
|
py
|
Python
|
fabfile.py
|
JNRowe-retired/Dolt
|
ffa670fe10c62a9fc788112c893aa8fc007168c9
|
[
"BSD-3-Clause"
] | 8
|
2015-11-05T15:42:26.000Z
|
2021-08-17T09:18:27.000Z
|
fabfile.py
|
JNRowe-retired/Dolt
|
ffa670fe10c62a9fc788112c893aa8fc007168c9
|
[
"BSD-3-Clause"
] | null | null | null |
fabfile.py
|
JNRowe-retired/Dolt
|
ffa670fe10c62a9fc788112c893aa8fc007168c9
|
[
"BSD-3-Clause"
] | 1
|
2021-03-12T18:51:23.000Z
|
2021-03-12T18:51:23.000Z
|
# TODO: automate building of sdist
# TODO: automate uploading of sdist to github.com
from fabric.api import local
def release():
    """Build a source distribution and upload it (via ``setup.py sdist upload``).

    Runs the command in the foreground (``capture=False``) so its output is
    shown to the user.
    """
    command = 'python setup.py sdist upload'
    local(command, capture=False)
| 23.5
| 56
| 0.75
| 28
| 188
| 5.035714
| 0.785714
| 0.170213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164894
| 188
| 7
| 57
| 26.857143
| 0.898089
| 0.425532
| 0
| 0
| 0
| 0
| 0.269231
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c758d0ddb086c37a167b9d8c42fc6e494bf937f
| 80,028
|
py
|
Python
|
mux_python/api/assets_api.py
|
gts-work/mux-python
|
826e52730bad7acd08c31a3e1951a281521f1b4f
|
[
"MIT"
] | null | null | null |
mux_python/api/assets_api.py
|
gts-work/mux-python
|
826e52730bad7acd08c31a3e1951a281521f1b4f
|
[
"MIT"
] | null | null | null |
mux_python/api/assets_api.py
|
gts-work/mux-python
|
826e52730bad7acd08c31a3e1951a281521f1b4f
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Mux API
Mux is how developers build online video. This API encompasses both Mux Video and Mux Data functionality to help you build your video-related projects better and faster than ever before. # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mux_python.api_client import ApiClient
from mux_python.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class AssetsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_asset(self, create_asset_request, **kwargs):  # noqa: E501
    """Create an asset.

    Create a new Mux Video asset. Synchronous by default; pass
    ``async_req=True`` to receive a thread whose ``get()`` yields the result.

    :param create_asset_request: (required)
    :type create_asset_request: CreateAssetRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: AssetResponse, or the request thread when async.
    :rtype: AssetResponse
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.create_asset_with_http_info(create_asset_request, **kwargs)  # noqa: E501
def create_asset_with_http_info(self, create_asset_request, **kwargs):  # noqa: E501
    """Create an asset  # noqa: E501

    Create a new Mux Video asset.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_asset_with_http_info(create_asset_request, async_req=True)
    >>> result = thread.get()

    :param create_asset_request: (required)
    :type create_asset_request: CreateAssetRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(AssetResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'create_asset_request'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_asset" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'create_asset_request' is set
    if self.api_client.client_side_validation and ('create_asset_request' not in local_var_params or  # noqa: E501
                                                   local_var_params['create_asset_request'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `create_asset_request` when calling `create_asset`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is the CreateAssetRequest payload itself.
    body_params = None
    if 'create_asset_request' in local_var_params:
        body_params = local_var_params['create_asset_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        201: "AssetResponse",
    }

    return self.api_client.call_api(
        '/video/v1/assets', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_asset_playback_id(self, asset_id, create_playback_id_request, **kwargs):  # noqa: E501
    """Create a playback ID.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` yields the result.

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param create_playback_id_request: (required)
    :type create_playback_id_request: CreatePlaybackIDRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: CreatePlaybackIDResponse, or the request thread when async.
    :rtype: CreatePlaybackIDResponse
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.create_asset_playback_id_with_http_info(asset_id, create_playback_id_request, **kwargs)  # noqa: E501
def create_asset_playback_id_with_http_info(self, asset_id, create_playback_id_request, **kwargs):  # noqa: E501
    """Create a playback ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_asset_playback_id_with_http_info(asset_id, create_playback_id_request, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param create_playback_id_request: (required)
    :type create_playback_id_request: CreatePlaybackIDRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(CreatePlaybackIDResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'asset_id',
        'create_playback_id_request'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_asset_playback_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'asset_id' is set
    if self.api_client.client_side_validation and ('asset_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['asset_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `create_asset_playback_id`")  # noqa: E501
    # verify the required parameter 'create_playback_id_request' is set
    if self.api_client.client_side_validation and ('create_playback_id_request' not in local_var_params or  # noqa: E501
                                                   local_var_params['create_playback_id_request'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `create_playback_id_request` when calling `create_asset_playback_id`")  # noqa: E501

    collection_formats = {}

    # ASSET_ID is substituted into the URL template below.
    path_params = {}
    if 'asset_id' in local_var_params:
        path_params['ASSET_ID'] = local_var_params['asset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is the CreatePlaybackIDRequest payload itself.
    body_params = None
    if 'create_playback_id_request' in local_var_params:
        body_params = local_var_params['create_playback_id_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        201: "CreatePlaybackIDResponse",
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/playback-ids', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_asset_track(self, asset_id, create_track_request, **kwargs):  # noqa: E501
    """Create an asset track.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` yields the result.

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param create_track_request: (required)
    :type create_track_request: CreateTrackRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: CreateTrackResponse, or the request thread when async.
    :rtype: CreateTrackResponse
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.create_asset_track_with_http_info(asset_id, create_track_request, **kwargs)  # noqa: E501
def create_asset_track_with_http_info(self, asset_id, create_track_request, **kwargs):  # noqa: E501
    """Create an asset track  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_asset_track_with_http_info(asset_id, create_track_request, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param create_track_request: (required)
    :type create_track_request: CreateTrackRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(CreateTrackResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'asset_id',
        'create_track_request'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_asset_track" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'asset_id' is set
    if self.api_client.client_side_validation and ('asset_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['asset_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `create_asset_track`")  # noqa: E501
    # verify the required parameter 'create_track_request' is set
    if self.api_client.client_side_validation and ('create_track_request' not in local_var_params or  # noqa: E501
                                                   local_var_params['create_track_request'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `create_track_request` when calling `create_asset_track`")  # noqa: E501

    collection_formats = {}

    # ASSET_ID is substituted into the URL template below.
    path_params = {}
    if 'asset_id' in local_var_params:
        path_params['ASSET_ID'] = local_var_params['asset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is the CreateTrackRequest payload itself.
    body_params = None
    if 'create_track_request' in local_var_params:
        body_params = local_var_params['create_track_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        201: "CreateTrackResponse",
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/tracks', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_asset(self, asset_id, **kwargs):  # noqa: E501
    """Delete an asset.

    Deletes a video asset and all its data. Synchronous by default; pass
    ``async_req=True`` to receive a thread whose ``get()`` yields the result.

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: None, or the request thread when async.
    :rtype: None
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.delete_asset_with_http_info(asset_id, **kwargs)  # noqa: E501
def delete_asset_with_http_info(self, asset_id, **kwargs):  # noqa: E501
    """Delete an asset  # noqa: E501

    Deletes a video asset and all its data  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_asset_with_http_info(asset_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'asset_id'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_asset" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'asset_id' is set
    if self.api_client.client_side_validation and ('asset_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['asset_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `delete_asset`")  # noqa: E501

    collection_formats = {}

    # ASSET_ID is substituted into the URL template below.
    path_params = {}
    if 'asset_id' in local_var_params:
        path_params['ASSET_ID'] = local_var_params['asset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body and expects no response model.
    body_params = None
    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    response_types_map = {}

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_asset_playback_id(self, asset_id, playback_id, **kwargs):  # noqa: E501
    """Delete a playback ID.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` yields the result.

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param playback_id: The live stream's playback ID. (required)
    :type playback_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: None, or the request thread when async.
    :rtype: None
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.delete_asset_playback_id_with_http_info(asset_id, playback_id, **kwargs)  # noqa: E501
def delete_asset_playback_id_with_http_info(self, asset_id, playback_id, **kwargs):  # noqa: E501
    """Delete a playback ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_asset_playback_id_with_http_info(asset_id, playback_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param playback_id: The live stream's playback ID. (required)
    :type playback_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'asset_id',
        'playback_id'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_asset_playback_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'asset_id' is set
    if self.api_client.client_side_validation and ('asset_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['asset_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `delete_asset_playback_id`")  # noqa: E501
    # verify the required parameter 'playback_id' is set
    if self.api_client.client_side_validation and ('playback_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['playback_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `playback_id` when calling `delete_asset_playback_id`")  # noqa: E501

    collection_formats = {}

    # Both IDs are substituted into the URL template below.
    path_params = {}
    if 'asset_id' in local_var_params:
        path_params['ASSET_ID'] = local_var_params['asset_id']  # noqa: E501
    if 'playback_id' in local_var_params:
        path_params['PLAYBACK_ID'] = local_var_params['playback_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body and expects no response model.
    body_params = None
    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    response_types_map = {}

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/playback-ids/{PLAYBACK_ID}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_asset_track(self, asset_id, track_id, **kwargs):  # noqa: E501
    """Delete an asset track.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` yields the result.

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param track_id: The track ID. (required)
    :type track_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: None, or the request thread when async.
    :rtype: None
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.delete_asset_track_with_http_info(asset_id, track_id, **kwargs)  # noqa: E501
def delete_asset_track_with_http_info(self, asset_id, track_id, **kwargs):  # noqa: E501
    """Delete an asset track  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_asset_track_with_http_info(asset_id, track_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param track_id: The track ID. (required)
    :type track_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'asset_id',
        'track_id'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_asset_track" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'asset_id' is set
    if self.api_client.client_side_validation and ('asset_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['asset_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `delete_asset_track`")  # noqa: E501
    # verify the required parameter 'track_id' is set
    if self.api_client.client_side_validation and ('track_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['track_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `track_id` when calling `delete_asset_track`")  # noqa: E501

    collection_formats = {}

    # Both IDs are substituted into the URL template below.
    path_params = {}
    if 'asset_id' in local_var_params:
        path_params['ASSET_ID'] = local_var_params['asset_id']  # noqa: E501
    if 'track_id' in local_var_params:
        path_params['TRACK_ID'] = local_var_params['track_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body and expects no response model.
    body_params = None
    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    response_types_map = {}

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/tracks/{TRACK_ID}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_asset(self, asset_id, **kwargs):  # noqa: E501
    """Retrieve an asset.

    Retrieves the details of an asset that has previously been created.
    Supply the unique asset ID that was returned from your previous request,
    and Mux will return the corresponding asset information. The same
    information is returned when creating an asset. Synchronous by default;
    pass ``async_req=True`` to receive a thread whose ``get()`` yields the
    result.

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout, or a
                             (connection, read) tuple.
    :return: AssetResponse, or the request thread when async.
    :rtype: AssetResponse
    """
    # This convenience wrapper always strips status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.get_asset_with_http_info(asset_id, **kwargs)  # noqa: E501
def get_asset_with_http_info(self, asset_id, **kwargs):  # noqa: E501
    """Retrieve an asset  # noqa: E501

    Retrieves the details of an asset that has previously been created. Supply the unique asset ID that was returned from your previous request, and Mux will return the corresponding asset information. The same information is returned when creating an asset.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_asset_with_http_info(asset_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(AssetResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() is captured before any other local is created, so it holds
    # exactly the declared parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = [
        'asset_id'
    ]
    # Common per-request options accepted by every endpoint method.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_asset" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'asset_id' is set
    if self.api_client.client_side_validation and ('asset_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['asset_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `get_asset`")  # noqa: E501

    collection_formats = {}

    # ASSET_ID is substituted into the URL template below.
    path_params = {}
    if 'asset_id' in local_var_params:
        path_params['ASSET_ID'] = local_var_params['asset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body; only an Accept header is needed.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['accessToken']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        200: "AssetResponse",
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_asset_input_info(self, asset_id, **kwargs):  # noqa: E501
    """Retrieve asset input info  # noqa: E501

    Returns a list of the input objects that were used to create the
    asset along with any settings that were applied to each input.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.get_asset_input_info(asset_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :return: GetAssetInputInfoResponse, or the request thread when
             called asynchronously.
    :rtype: GetAssetInputInfoResponse
    """
    # This convenience wrapper always returns just the payload, never
    # the (data, status, headers) tuple.
    options = dict(kwargs, _return_http_data_only=True)
    return self.get_asset_input_info_with_http_info(asset_id, **options)  # noqa: E501
def get_asset_input_info_with_http_info(self, asset_id, **kwargs):  # noqa: E501
    """Retrieve asset input info  # noqa: E501

    Returns a list of the input objects that were used to create the
    asset along with any settings that were applied to each input.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.get_asset_input_info_with_http_info(asset_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response data only, without
                                   the status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :param _request_auth: overrides the spec's auth settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(GetAssetInputInfoResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # Endpoint parameters plus the generic per-request options.
    accepted = [
        'asset_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    params = {'asset_id': asset_id}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_asset_input_info" % name
            )
        params[name] = value

    # 'asset_id' is required; reject a None value up front.
    if self.api_client.client_side_validation and params.get('asset_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `get_asset_input_info`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/input-info', 'GET',
        {'ASSET_ID': params['asset_id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],  # form params
        files={},
        response_types_map={
            200: "GetAssetInputInfoResponse",
        },
        auth_settings=['accessToken'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def get_asset_playback_id(self, asset_id, playback_id, **kwargs):  # noqa: E501
    """Retrieve a playback ID  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.get_asset_playback_id(asset_id, playback_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param playback_id: The playback ID of the asset. (required)
    :type playback_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :return: GetAssetPlaybackIDResponse, or the request thread when
             called asynchronously.
    :rtype: GetAssetPlaybackIDResponse
    """
    # This convenience wrapper always returns just the payload, never
    # the (data, status, headers) tuple.
    options = dict(kwargs, _return_http_data_only=True)
    return self.get_asset_playback_id_with_http_info(asset_id, playback_id, **options)  # noqa: E501
def get_asset_playback_id_with_http_info(self, asset_id, playback_id, **kwargs):  # noqa: E501
    """Retrieve a playback ID  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.get_asset_playback_id_with_http_info(asset_id, playback_id, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param playback_id: The playback ID of the asset. (required)
    :type playback_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response data only, without
                                   the status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :param _request_auth: overrides the spec's auth settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(GetAssetPlaybackIDResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # Endpoint parameters plus the generic per-request options.
    accepted = [
        'asset_id',
        'playback_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    params = {'asset_id': asset_id, 'playback_id': playback_id}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_asset_playback_id" % name
            )
        params[name] = value

    # Both path parameters are required; reject None values up front.
    if self.api_client.client_side_validation and params.get('asset_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `get_asset_playback_id`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('playback_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `playback_id` when calling `get_asset_playback_id`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/playback-ids/{PLAYBACK_ID}', 'GET',
        {
            'ASSET_ID': params['asset_id'],  # path params
            'PLAYBACK_ID': params['playback_id'],
        },
        [],  # query params
        header_params,
        body=None,
        post_params=[],  # form params
        files={},
        response_types_map={
            200: "GetAssetPlaybackIDResponse",
        },
        auth_settings=['accessToken'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def list_assets(self, **kwargs):  # noqa: E501
    """List assets  # noqa: E501

    List all Mux assets.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.list_assets(async_req=True)
    >>> result = thread.get()

    :param limit: Number of items to include in the response
    :type limit: int
    :param page: Offset by this many pages, of the size of `limit`
    :type page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :return: ListAssetsResponse, or the request thread when called
             asynchronously.
    :rtype: ListAssetsResponse
    """
    # This convenience wrapper always returns just the payload, never
    # the (data, status, headers) tuple.
    options = dict(kwargs, _return_http_data_only=True)
    return self.list_assets_with_http_info(**options)  # noqa: E501
def list_assets_with_http_info(self, **kwargs):  # noqa: E501
    """List assets  # noqa: E501

    List all Mux assets.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.list_assets_with_http_info(async_req=True)
    >>> result = thread.get()

    :param limit: Number of items to include in the response
    :type limit: int
    :param page: Offset by this many pages, of the size of `limit`
    :type page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response data only, without
                                   the status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :param _request_auth: overrides the spec's auth settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(ListAssetsResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # Endpoint parameters plus the generic per-request options.
    accepted = [
        'limit',
        'page',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_assets" % name
            )
        params[name] = value

    # Pagination options become query parameters only when supplied
    # (and not None); order is limit then page.
    query_params = [(name, params[name])
                    for name in ('limit', 'page')
                    if params.get(name) is not None]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/video/v1/assets', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],  # form params
        files={},
        response_types_map={
            200: "ListAssetsResponse",
        },
        auth_settings=['accessToken'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def update_asset_master_access(self, asset_id, update_asset_master_access_request, **kwargs):  # noqa: E501
    """Update master access  # noqa: E501

    Allows you to add temporary access to the master (highest-quality)
    version of the asset in MP4 format. A URL will be created that can be
    used to download the master version for 24 hours. After 24 hours
    Master Access will revert to \"none\". This master version is not
    optimized for web and not meant to be streamed, only downloaded for
    purposes like archiving or editing the video offline.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.update_asset_master_access(asset_id, update_asset_master_access_request, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param update_asset_master_access_request: (required)
    :type update_asset_master_access_request: UpdateAssetMasterAccessRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :return: AssetResponse, or the request thread when called
             asynchronously.
    :rtype: AssetResponse
    """
    # This convenience wrapper always returns just the payload, never
    # the (data, status, headers) tuple.
    options = dict(kwargs, _return_http_data_only=True)
    return self.update_asset_master_access_with_http_info(asset_id, update_asset_master_access_request, **options)  # noqa: E501
def update_asset_master_access_with_http_info(self, asset_id, update_asset_master_access_request, **kwargs):  # noqa: E501
    """Update master access  # noqa: E501

    Allows you to add temporary access to the master (highest-quality)
    version of the asset in MP4 format. A URL will be created that can be
    used to download the master version for 24 hours. After 24 hours
    Master Access will revert to \"none\". This master version is not
    optimized for web and not meant to be streamed, only downloaded for
    purposes like archiving or editing the video offline.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.update_asset_master_access_with_http_info(asset_id, update_asset_master_access_request, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param update_asset_master_access_request: (required)
    :type update_asset_master_access_request: UpdateAssetMasterAccessRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response data only, without
                                   the status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :param _request_auth: overrides the spec's auth settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(AssetResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # Endpoint parameters plus the generic per-request options.
    accepted = [
        'asset_id',
        'update_asset_master_access_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    params = {
        'asset_id': asset_id,
        'update_asset_master_access_request': update_asset_master_access_request,
    }
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_asset_master_access" % name
            )
        params[name] = value

    # Both the path parameter and the request body are required.
    if self.api_client.client_side_validation and params.get('asset_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `update_asset_master_access`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('update_asset_master_access_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `update_asset_master_access_request` when calling `update_asset_master_access`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/master-access', 'PUT',
        {'ASSET_ID': params['asset_id']},  # path params
        [],  # query params
        header_params,
        body=params['update_asset_master_access_request'],
        post_params=[],  # form params
        files={},
        response_types_map={
            200: "AssetResponse",
        },
        auth_settings=['accessToken'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def update_asset_mp4_support(self, asset_id, update_asset_mp4_support_request, **kwargs):  # noqa: E501
    """Update MP4 support  # noqa: E501

    Allows you to add or remove mp4 support for assets that were created
    without it. Currently there are two values supported in this request,
    `standard` and `none`. `none` means that an asset *does not* have mp4
    support, so submitting a request with `mp4_support` set to `none` will
    delete the mp4 assets from the asset in question.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.update_asset_mp4_support(asset_id, update_asset_mp4_support_request, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param update_asset_mp4_support_request: (required)
    :type update_asset_mp4_support_request: UpdateAssetMP4SupportRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :return: AssetResponse, or the request thread when called
             asynchronously.
    :rtype: AssetResponse
    """
    # This convenience wrapper always returns just the payload, never
    # the (data, status, headers) tuple.
    options = dict(kwargs, _return_http_data_only=True)
    return self.update_asset_mp4_support_with_http_info(asset_id, update_asset_mp4_support_request, **options)  # noqa: E501
def update_asset_mp4_support_with_http_info(self, asset_id, update_asset_mp4_support_request, **kwargs):  # noqa: E501
    """Update MP4 support  # noqa: E501

    Allows you to add or remove mp4 support for assets that were created
    without it. Currently there are two values supported in this request,
    `standard` and `none`. `none` means that an asset *does not* have mp4
    support, so submitting a request with `mp4_support` set to `none` will
    delete the mp4 assets from the asset in question.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result.

    >>> thread = api.update_asset_mp4_support_with_http_info(asset_id, update_asset_mp4_support_request, async_req=True)
    >>> result = thread.get()

    :param asset_id: The asset ID. (required)
    :type asset_id: str
    :param update_asset_mp4_support_request: (required)
    :type update_asset_mp4_support_request: UpdateAssetMP4SupportRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response data only, without
                                   the status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding the body. Default True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (number), or a
                             (connection, read) tuple of timeouts.
    :param _request_auth: overrides the spec's auth settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(AssetResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # Endpoint parameters plus the generic per-request options.
    accepted = [
        'asset_id',
        'update_asset_mp4_support_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    params = {
        'asset_id': asset_id,
        'update_asset_mp4_support_request': update_asset_mp4_support_request,
    }
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_asset_mp4_support" % name
            )
        params[name] = value

    # Both the path parameter and the request body are required.
    if self.api_client.client_side_validation and params.get('asset_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `asset_id` when calling `update_asset_mp4_support`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('update_asset_mp4_support_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `update_asset_mp4_support_request` when calling `update_asset_mp4_support`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/video/v1/assets/{ASSET_ID}/mp4-support', 'PUT',
        {'ASSET_ID': params['asset_id']},  # path params
        [],  # query params
        header_params,
        body=params['update_asset_mp4_support_request'],
        post_params=[],  # form params
        files={},
        response_types_map={
            200: "AssetResponse",
        },
        auth_settings=['accessToken'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
| 46.25896
| 411
| 0.603501
| 9,114
| 80,028
| 5.020408
| 0.032478
| 0.036717
| 0.056911
| 0.028324
| 0.972528
| 0.967851
| 0.965272
| 0.958519
| 0.943439
| 0.940336
| 0
| 0.012778
| 0.328173
| 80,028
| 1,729
| 412
| 46.285714
| 0.838259
| 0.480307
| 0
| 0.73456
| 0
| 0
| 0.194182
| 0.062744
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032852
| false
| 0
| 0.00657
| 0
| 0.072273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92e188ffb8651e4dc003ac3c3f570fe0147e2fe1
| 5,418
|
py
|
Python
|
graphql_utils/tests/test_multi.py
|
Mergifyio/graphql-utils
|
3ff796555e51a07500cf48081c9771121bcb9bc7
|
[
"Apache-2.0"
] | 6
|
2019-07-12T17:44:31.000Z
|
2020-08-18T17:01:51.000Z
|
graphql_utils/tests/test_multi.py
|
sthagen/graphql-utils
|
3ff796555e51a07500cf48081c9771121bcb9bc7
|
[
"Apache-2.0"
] | null | null | null |
graphql_utils/tests/test_multi.py
|
sthagen/graphql-utils
|
3ff796555e51a07500cf48081c9771121bcb9bc7
|
[
"Apache-2.0"
] | 3
|
2019-07-11T15:14:02.000Z
|
2021-11-29T16:20:07.000Z
|
import pytest
from graphql_utils import multi
@pytest.mark.asyncio
async def test_multi_query_limit():
    """With max_batch_size=1, each repository is queried in its own request."""
    # Page-1 result for the first repository: no further pages.
    Q0_first_result = {
        "Q0": {
            "collaborators": {
                "nodes": [],
                "pageInfo": {
                    "hasNextPage": False,
                    "endCursor": None,
                },
            },
        },
    }
    # Page-1 result for the second repository: one more page behind
    # cursor "magic==", which forces a follow-up request.
    Q1_first_result = {
        "Q1": {
            "collaborators": {
                "nodes": [],
                "pageInfo": {
                    "hasNextPage": True,
                    "endCursor": "magic==",
                },
            }
        },
    }
    # Mutable counter shared with the fake transport below.
    send_fn_calls = {"call": 0}

    async def send_fn(query):
        # Fake transport: asserts the exact query text of each request
        # and returns a canned response.
        send_fn_calls["call"] += 1
        if send_fn_calls["call"] == 1:
            # Request 1: only repo "foo" (batch size is 1).
            assert (
                query
                == """{
Q0: repository(owner: "jd", name: "foo") {
collaborators(first: 100) {
nodes {
login
}
pageInfo {
hasNextPage
endCursor
}
}
}
}"""
            )
            return {"data": Q0_first_result}
        if send_fn_calls["call"] == 2:
            # Request 2: repo "bar", first page (no cursor yet).
            assert (
                query
                == """{
Q0: repository(owner: "jd", name: "bar") {
collaborators(first: 100) {
nodes {
login
}
pageInfo {
hasNextPage
endCursor
}
}
}
}"""
            )
            return {"data": Q1_first_result}
        # Request 3: repo "bar" again, resuming after the cursor.
        assert (
            query
            == """{
Q0: repository(owner: "jd", name: "bar") {
collaborators(first: 100 after: "magic==" ) {
nodes {
login
}
pageInfo {
hasNextPage
endCursor
}
}
}
}"""
        )
        return {"data": {}}

    repos = (
        {
            "owner": "jd",
            "name": "foo",
        },
        {
            "owner": "jd",
            "name": "bar",
        },
    )
    # "{{after}}" is a placeholder multi_query fills with the page cursor.
    iterable_result = multi.multi_query(
        """repository(owner: "{owner}", name: "{name}") {{
collaborators(first: 100{{after}}) {{
nodes {{
login
}}
pageInfo {{
hasNextPage
endCursor
}}
}}
}}""",
        repos,
        send_fn,
        ("collaborators", "pageInfo"),
        max_batch_size=1,
    )
    results = [result async for result in iterable_result]
    # One yielded result per repository since batches hold a single query.
    assert len(results) == 2
    assert results[0] == Q0_first_result
    assert results[1] == Q1_first_result
@pytest.mark.asyncio
async def test_multi_query():
    """With the default batch size, both repositories share one request."""
    # Page-1 result for the first repository: no further pages.
    Q0_first_result = {
        "Q0": {
            "collaborators": {
                "nodes": [],
                "pageInfo": {
                    "hasNextPage": False,
                    "endCursor": None,
                },
            },
        },
    }
    # Page-1 result for the second repository: one more page behind
    # cursor "magic==", which forces a follow-up request.
    Q1_first_result = {
        "Q1": {
            "collaborators": {
                "nodes": [],
                "pageInfo": {
                    "hasNextPage": True,
                    "endCursor": "magic==",
                },
            }
        },
    }
    # The batched response carries both aliases in one payload.
    first_result = {**Q0_first_result, **Q1_first_result}
    # Mutable counter shared with the fake transport below.
    send_fn_calls = {"call": 0}

    async def send_fn(query):
        # Fake transport: asserts the exact query text of each request
        # and returns a canned response.
        send_fn_calls["call"] += 1
        if send_fn_calls["call"] == 1:
            # Request 1: both repos batched into a single query document.
            assert (
                query
                == """{
Q0: repository(owner: "jd", name: "foo") {
collaborators(first: 100) {
nodes {
login
}
pageInfo {
hasNextPage
endCursor
}
}
}
Q1: repository(owner: "jd", name: "bar") {
collaborators(first: 100) {
nodes {
login
}
pageInfo {
hasNextPage
endCursor
}
}
}
}"""
            )
            return {"data": first_result}
        # Request 2: only "bar" needs another page, resumed after the cursor.
        assert (
            query
            == """{
Q0: repository(owner: "jd", name: "bar") {
collaborators(first: 100 after: "magic==" ) {
nodes {
login
}
pageInfo {
hasNextPage
endCursor
}
}
}
}"""
        )
        return {"data": {}}

    repos = (
        {
            "owner": "jd",
            "name": "foo",
        },
        {
            "owner": "jd",
            "name": "bar",
        },
    )
    # "{{after}}" is a placeholder multi_query fills with the page cursor.
    iterable_result = multi.multi_query(
        """repository(owner: "{owner}", name: "{name}") {{
collaborators(first: 100{{after}}) {{
nodes {{
login
}}
pageInfo {{
hasNextPage
endCursor
}}
}}
}}""",
        repos,
        send_fn,
        ("collaborators", "pageInfo"),
    )
    results = [result async for result in iterable_result]
    # One yielded result covering the whole batch.
    assert len(results) == 1
    assert results[0] == first_result
| 23.153846
| 58
| 0.35179
| 351
| 5,418
| 5.273504
| 0.156695
| 0.077256
| 0.059427
| 0.125338
| 0.895732
| 0.886548
| 0.886548
| 0.886548
| 0.844408
| 0.844408
| 0
| 0.0223
| 0.528239
| 5,418
| 233
| 59
| 23.253219
| 0.701878
| 0
| 0
| 0.617347
| 0
| 0
| 0.431988
| 0
| 0
| 0
| 0
| 0
| 0.05102
| 1
| 0
| false
| 0
| 0.010204
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92f49b5d84819049cfcf729262d588eac218f6fd
| 31,989
|
py
|
Python
|
auth-api/tests/unit/api/test_org.py
|
jeznorth/sbc-auth
|
12e2a308a6035629e6cc285980497b69b44a5a5d
|
[
"Apache-2.0"
] | null | null | null |
auth-api/tests/unit/api/test_org.py
|
jeznorth/sbc-auth
|
12e2a308a6035629e6cc285980497b69b44a5a5d
|
[
"Apache-2.0"
] | null | null | null |
auth-api/tests/unit/api/test_org.py
|
jeznorth/sbc-auth
|
12e2a308a6035629e6cc285980497b69b44a5a5d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to verify the orgs API end-point.
Test-Suite to ensure that the /orgs endpoint is working as expected.
"""
import json
from unittest.mock import patch
from auth_api import status as http_status
from auth_api.exceptions import BusinessException
from auth_api.exceptions.errors import Error
from auth_api.services import Affiliation as AffiliationService
from auth_api.services import Invitation as InvitationService
from auth_api.services import Org as OrgService
from auth_api.services import User as UserService
from tests.utilities.factory_scenarios import (
TestAffliationInfo, TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo)
from tests.utilities.factory_utils import factory_auth_header, factory_invitation
def test_add_org(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a valid org payload is accepted and returns 201."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    # The calling user must exist before an org can be created.
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.org1),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_201_CREATED
def test_add_same_org_409(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that creating the same org twice responds with 409 Conflict."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    org_payload = json.dumps(TestOrgInfo.org1)
    first = client.post('/api/v1/orgs', data=org_payload,
                        headers=headers, content_type='application/json')
    assert first.status_code == http_status.HTTP_201_CREATED
    # Re-posting the identical org must be rejected as a duplicate.
    duplicate = client.post('/api/v1/orgs', data=org_payload,
                            headers=headers, content_type='application/json')
    assert duplicate.status_code == http_status.HTTP_409_CONFLICT
def test_add_org_invalid_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an invalid org payload is rejected with 400."""
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.invalid),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_org_invalid_space_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an org name containing an illegal space is rejected with 400."""
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.invalid_name_space),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_org_invalid_spaces_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an org name with multiple illegal spaces is rejected with 400."""
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.invalid_name_spaces),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_org_invalid_end_space_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an org name with a trailing space is rejected with 400."""
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.invalid_name_end_space),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_org_invalid_start_space_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an org name with a leading space is rejected with 400."""
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.invalid_name_start_space),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_org_invalid_returns_401(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a caller holding only the view role cannot create an org."""
    # view_role lacks the edit permission required by the POST endpoint.
    headers = factory_auth_header(jwt, claims=TestJwtClaims.view_role)
    response = client.post('/api/v1/orgs',
                           data=json.dumps(TestOrgInfo.org1),
                           headers=headers,
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_401_UNAUTHORIZED
def test_add_org_invalid_user_returns_401(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that org creation returns 401 when the JWT resolves to no user."""
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    # Simulate a token that does not map to any known user record.
    with patch.object(UserService, 'find_by_jwt_token', return_value=None):
        response = client.post('/api/v1/orgs',
                               data=json.dumps(TestOrgInfo.org1),
                               headers=headers,
                               content_type='application/json')
    assert response.status_code == http_status.HTTP_401_UNAUTHORIZED
def test_add_org_invalid_returns_exception(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a BusinessException from the service layer surfaces as a 400.

    OrgService.create_org is patched to raise BusinessException so the
    endpoint's exception handling path is exercised directly.
    """
    headers = factory_auth_header(jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    with patch.object(OrgService, 'create_org',
                      side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)):
        rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                         headers=headers, content_type='application/json')
        # Use the named status constant instead of a bare 400 literal,
        # consistent with every other assertion in this module.
        assert rv.status_code == http_status.HTTP_400_BAD_REQUEST
def test_get_org(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a created org can be fetched back via GET."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.get(f'/api/v1/orgs/{org_id}',
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_200_OK
    body = json.loads(response.data)
    assert body['id'] == org_id
def test_get_org_no_auth_returns_401(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that fetching an org without an Authorization header returns 401."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    # Retrieve with no auth headers at all.
    response = client.get(f'/api/v1/orgs/{org_id}',
                          headers=None, content_type='application/json')
    assert response.status_code == http_status.HTTP_401_UNAUTHORIZED
def test_get_org_no_org_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that fetching a non-existent org returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    missing_org_id = 999
    response = client.get(f'/api/v1/orgs/{missing_org_id}',
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_update_org(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an existing org can be updated via PUT."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.put(f'/api/v1/orgs/{org_id}',
                          data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_200_OK
    body = json.loads(response.data)
    assert body['id'] == org_id
def test_update_org_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that PUTting an invalid payload onto an existing org returns 400."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.put(f'/api/v1/orgs/{org_id}',
                          data=json.dumps(TestOrgInfo.invalid),
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_update_org_no_org_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that updating a non-existent org returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    missing_org_id = 999
    response = client.put(f'/api/v1/orgs/{missing_org_id}',
                          data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_update_org_returns_exception(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a BusinessException during update surfaces as a 400.

    OrgService.update_org is patched to raise BusinessException so the
    PUT endpoint's exception handling path is exercised directly.
    """
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                     headers=headers, content_type='application/json')
    org_id = json.loads(rv.data)['id']
    with patch.object(OrgService, 'update_org',
                      side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)):
        rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(TestOrgInfo.org1),
                        headers=headers, content_type='application/json')
        # Use the named status constant instead of a bare 400 literal,
        # consistent with every other assertion in this module.
        assert rv.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_contact(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a contact can be attached to an org and echoed back."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.post(f'/api/v1/orgs/{org_id}/contacts',
                           headers=headers,
                           data=json.dumps(TestContactInfo.contact1),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_201_CREATED
    body = json.loads(response.data)
    assert len(body['contacts']) == 1
    assert body['contacts'][0]['email'] == TestContactInfo.contact1['email']
def test_add_contact_invalid_format_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an invalidly formatted contact payload is rejected with 400."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.post(f'/api/v1/orgs/{org_id}/contacts',
                           headers=headers,
                           data=json.dumps(TestContactInfo.invalid),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_contact_valid_email_returns_201(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a contact whose email holds special characters is accepted (201)."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.post(f'/api/v1/orgs/{org_id}/contacts',
                           headers=headers,
                           data=json.dumps(TestContactInfo.email_valid),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_201_CREATED
def test_add_contact_no_org_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that adding a contact to a non-existent org returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    missing_org_id = 99
    response = client.post(f'/api/v1/orgs/{missing_org_id}/contacts',
                           headers=headers,
                           data=json.dumps(TestContactInfo.contact1),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_add_contact_duplicate_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that adding the same contact twice to an org returns 400."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    contact_payload = json.dumps(TestContactInfo.contact1)
    client.post(f'/api/v1/orgs/{org_id}/contacts',
                headers=headers, data=contact_payload, content_type='application/json')
    # Second POST of the identical contact must be rejected.
    response = client.post(f'/api/v1/orgs/{org_id}/contacts',
                           headers=headers, data=contact_payload, content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_update_contact(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an org's contact can be replaced via PUT."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    added = client.post(f'/api/v1/orgs/{org_id}/contacts',
                        headers=headers,
                        data=json.dumps(TestContactInfo.contact1),
                        content_type='application/json')
    assert added.status_code == http_status.HTTP_201_CREATED
    updated = client.put(f'/api/v1/orgs/{org_id}/contacts',
                         headers=headers,
                         data=json.dumps(TestContactInfo.contact2),
                         content_type='application/json')
    assert updated.status_code == http_status.HTTP_200_OK
    body = json.loads(updated.data)
    # The update replaces the contact rather than appending a second one.
    assert len(body['contacts']) == 1
    assert body['contacts'][0]['email'] == TestContactInfo.contact2['email']
def test_update_contact_invalid_format_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that updating a contact with an invalid payload returns 400."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    client.post(f'/api/v1/orgs/{org_id}/contacts',
                headers=headers,
                data=json.dumps(TestContactInfo.contact1),
                content_type='application/json')
    response = client.put(f'/api/v1/orgs/{org_id}/contacts',
                          headers=headers,
                          data=json.dumps(TestContactInfo.invalid),
                          content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_update_contact_valid_email_format_returns_200(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that updating to an email with special characters succeeds (200)."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    client.post(f'/api/v1/orgs/{org_id}/contacts',
                headers=headers,
                data=json.dumps(TestContactInfo.contact1),
                content_type='application/json')
    response = client.put(f'/api/v1/orgs/{org_id}/contacts',
                          headers=headers,
                          data=json.dumps(TestContactInfo.email_valid),
                          content_type='application/json')
    assert response.status_code == http_status.HTTP_200_OK
def test_update_contact_no_org_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that updating a contact on a non-existent org returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    missing_org_id = 99
    response = client.put(f'/api/v1/orgs/{missing_org_id}/contacts',
                          headers=headers,
                          data=json.dumps(TestContactInfo.contact1),
                          content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_update_contact_missing_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that updating an org which has no contact yet returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    # No contact was ever added, so the PUT has nothing to update.
    response = client.put(f'/api/v1/orgs/{org_id}/contacts',
                          headers=headers,
                          data=json.dumps(TestContactInfo.contact1),
                          content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_delete_contact(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an org's contact can be removed via DELETE."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    added = client.post(f'/api/v1/orgs/{org_id}/contacts',
                        headers=headers,
                        data=json.dumps(TestContactInfo.contact1),
                        content_type='application/json')
    assert added.status_code == http_status.HTTP_201_CREATED
    deleted = client.delete(f'/api/v1/orgs/{org_id}/contacts',
                            headers=headers,
                            data=json.dumps(TestContactInfo.contact2),
                            content_type='application/json')
    assert deleted.status_code == http_status.HTTP_200_OK
    body = json.loads(deleted.data)
    assert len(body['contacts']) == 0
def test_delete_contact_no_org_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that deleting a contact on a non-existent org returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    missing_org_id = 99
    response = client.delete(f'/api/v1/orgs/{missing_org_id}/contacts',
                             headers=headers,
                             data=json.dumps(TestContactInfo.contact1),
                             content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_delete_contact_returns_exception(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a BusinessException during contact delete surfaces as a 400.

    OrgService.delete_contact is patched to raise BusinessException so the
    DELETE endpoint's exception handling path is exercised directly.
    """
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                     headers=headers, content_type='application/json')
    org_id = json.loads(rv.data)['id']
    with patch.object(OrgService, 'delete_contact',
                      side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)):
        rv = client.delete('/api/v1/orgs/{}/contacts'.format(org_id),
                           headers=headers, content_type='application/json')
        # Use the named status constant instead of a bare 400 literal,
        # consistent with every other assertion in this module.
        assert rv.status_code == http_status.HTTP_400_BAD_REQUEST
def test_get_members(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that the creating user is listed as the org's OWNER member."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.get(f'/api/v1/orgs/{org_id}/members',
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_200_OK
    members = json.loads(response.data)['members']
    assert members
    assert len(members) == 1
    # The creator is automatically enrolled as the owner.
    assert members[0]['membershipTypeCode'] == 'OWNER'
def test_get_invitations(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that all invitations issued for an org are listed in order."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    for recipient in ('abc123@email.com', 'xyz456@email.com'):
        client.post('/api/v1/invitations',
                    data=json.dumps(factory_invitation(org_id, recipient)),
                    headers=headers, content_type='application/json')
    response = client.get(f'/api/v1/orgs/{org_id}/invitations',
                          headers=headers, content_type='application/json')
    assert response.status_code == http_status.HTTP_200_OK
    invitations = json.loads(response.data)['invitations']
    assert invitations
    assert len(invitations) == 2
    assert invitations[0]['recipientEmail'] == 'abc123@email.com'
    assert invitations[1]['recipientEmail'] == 'xyz456@email.com'
def test_update_member(client, jwt, session, auth_mock):  # pylint:disable=unused-argument
    """Assert that a member of an org can have their role updated."""
    # Set up: create/login user, create org
    headers_invitee = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    rv = client.post('/api/v1/users', headers=headers_invitee, content_type='application/json')
    rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                     headers=headers_invitee, content_type='application/json')
    dictionary = json.loads(rv.data)
    org_id = dictionary['id']
    # Invite a user to the org
    rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id, 'abc123@email.com')),
                     headers=headers_invitee, content_type='application/json')
    dictionary = json.loads(rv.data)
    invitation_id = dictionary['id']
    # The confirmation token stands in for the link the invitee would receive by email.
    invitation_id_token = InvitationService.generate_confirmation_token(invitation_id)
    # Create/login as invited user (a second identity, edit_role_2)
    headers_invited = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role_2)
    rv = client.post('/api/v1/users', headers=headers_invited, content_type='application/json')
    # Accept invite as invited user
    rv = client.put('/api/v1/invitations/tokens/{}'.format(invitation_id_token),
                    headers=headers_invited, content_type='application/json')
    assert rv.status_code == http_status.HTTP_200_OK
    dictionary = json.loads(rv.data)
    assert dictionary['status'] == 'ACCEPTED'
    # Get pending members for the org as invitee and assert length of 1
    rv = client.get('/api/v1/orgs/{}/members?status=PENDING_APPROVAL'.format(org_id), headers=headers_invitee)
    assert rv.status_code == http_status.HTTP_200_OK
    dictionary = json.loads(rv.data)
    assert dictionary['members']
    assert len(dictionary['members']) == 1
    # Find the pending member; accepted invitees start with the MEMBER role
    new_member = dictionary['members'][0]
    assert new_member['membershipTypeCode'] == 'MEMBER'
    member_id = new_member['id']
    # Update the new member: promote MEMBER -> ADMIN as the org owner
    rv = client.patch('/api/v1/orgs/{}/members/{}'.format(org_id, member_id), headers=headers_invitee,
                      data=json.dumps({'role': 'ADMIN'}), content_type='application/json')
    assert rv.status_code == http_status.HTTP_200_OK
    dictionary = json.loads(rv.data)
    assert dictionary['membershipTypeCode'] == 'ADMIN'
def test_add_affiliation(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an entity can be affiliated with an org (201)."""
    # Create the entity first, authenticated via passcode claims.
    passcode_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode)
    client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock),
                headers=passcode_headers, content_type='application/json')
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.post(f'/api/v1/orgs/{org_id}/affiliations',
                           headers=headers,
                           data=json.dumps(TestAffliationInfo.affiliation3),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_201_CREATED
    body = json.loads(response.data)
    assert body['org']['id'] == org_id
def test_add_affiliation_invalid_format_returns_400(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that an invalidly formatted affiliation payload is rejected with 400."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    response = client.post(f'/api/v1/orgs/{org_id}/affiliations',
                           headers=headers,
                           data=json.dumps(TestAffliationInfo.invalid),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_400_BAD_REQUEST
def test_add_affiliation_no_org_returns_404(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that adding an affiliation to a non-existent org returns 404."""
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    missing_org_id = 99
    response = client.post(f'/api/v1/orgs/{missing_org_id}/affiliations',
                           headers=headers,
                           data=json.dumps(TestAffliationInfo.affliation1),
                           content_type='application/json')
    assert response.status_code == http_status.HTTP_404_NOT_FOUND
def test_add_affiliation_returns_exception(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that a BusinessException during affiliation creation surfaces as a 400.

    AffiliationService.create_affiliation is patched to raise BusinessException
    so the endpoint's exception handling path is exercised directly.
    """
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode)
    client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1),
                headers=headers, content_type='application/json')
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                     headers=headers, content_type='application/json')
    org_id = json.loads(rv.data)['id']
    with patch.object(AffiliationService, 'create_affiliation',
                      side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)):
        rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id),
                         data=json.dumps(TestAffliationInfo.affliation1),
                         headers=headers,
                         content_type='application/json')
        # Use the named status constant instead of a bare 400 literal,
        # consistent with every other assertion in this module.
        assert rv.status_code == http_status.HTTP_400_BAD_REQUEST
def test_get_affiliations(client, jwt, session):  # pylint:disable=unused-argument
    """Assert that all affiliations attached to an org can be listed."""
    # Create two entities first, authenticated via passcode claims.
    passcode_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode)
    for entity in (TestEntityInfo.entity_lear_mock, TestEntityInfo.entity_lear_mock2):
        client.post('/api/v1/entities', data=json.dumps(entity),
                    headers=passcode_headers, content_type='application/json')
    headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role)
    client.post('/api/v1/users', headers=headers, content_type='application/json')
    created = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1),
                          headers=headers, content_type='application/json')
    org_id = json.loads(created.data)['id']
    # Affiliate both entities with the org.
    for affiliation in (TestAffliationInfo.affiliation3, TestAffliationInfo.affiliation4):
        client.post(f'/api/v1/orgs/{org_id}/affiliations',
                    data=json.dumps(affiliation),
                    headers=headers,
                    content_type='application/json')
    response = client.get(f'/api/v1/orgs/{org_id}/affiliations', headers=headers)
    assert response.status_code == http_status.HTTP_200_OK
    affiliations = json.loads(response.data)
    assert affiliations[0]['businessIdentifier'] == TestEntityInfo.entity_lear_mock['businessIdentifier']
    assert affiliations[1]['businessIdentifier'] == TestEntityInfo.entity_lear_mock2['businessIdentifier']
| 54.495741
| 119
| 0.709213
| 4,183
| 31,989
| 5.253168
| 0.059766
| 0.023437
| 0.10112
| 0.119505
| 0.87467
| 0.862974
| 0.861245
| 0.851097
| 0.848366
| 0.837126
| 0
| 0.017613
| 0.164056
| 31,989
| 586
| 120
| 54.588737
| 0.804084
| 0.136516
| 0
| 0.719902
| 0
| 0
| 0.142539
| 0.0285
| 0
| 0
| 0
| 0
| 0.159705
| 1
| 0.093366
| false
| 0.007371
| 0.027027
| 0
| 0.120393
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92fb39646a5b2c52cd6e5ee76b9597231e86f94b
| 14,248
|
py
|
Python
|
migrations/versions/84a25e306a45_set_foreign_key_ondelete.py
|
jean-edouard-boulanger/finbot
|
ddc3c0e4673b1025d2352719755ff77ef445577c
|
[
"MIT"
] | 1
|
2020-12-25T19:33:27.000Z
|
2020-12-25T19:33:27.000Z
|
migrations/versions/84a25e306a45_set_foreign_key_ondelete.py
|
jean-edouard-boulanger/finbot
|
ddc3c0e4673b1025d2352719755ff77ef445577c
|
[
"MIT"
] | 1
|
2021-01-18T23:19:58.000Z
|
2021-01-19T17:35:13.000Z
|
migrations/versions/84a25e306a45_set_foreign_key_ondelete.py
|
jean-edouard-boulanger/finbot
|
ddc3c0e4673b1025d2352719755ff77ef445577c
|
[
"MIT"
] | 1
|
2020-01-19T22:37:36.000Z
|
2020-01-19T22:37:36.000Z
|
"""set foreign key ondelete
Revision ID: 84a25e306a45
Revises: 310f03145878
Create Date: 2020-02-09 15:52:59.695386
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Identifier of this migration.
revision = '84a25e306a45'
# Parent revision this migration applies on top of.
down_revision = '310f03145878'
# No named branch; migration lives on the main chain.
branch_labels = None
# No cross-branch dependencies.
depends_on = None
def upgrade():
    """Re-create every finbot foreign key with an ON DELETE action.

    For each table, the original auto-named foreign key is dropped and an
    unnamed replacement is created with either ``ondelete='CASCADE'``
    (dependent rows are removed with their parent) or
    ``ondelete='SET NULL'`` (the reference is cleared but the row kept).
    The drop must precede the matching create for each column pair.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # finbot_linked_accounts: cascade deletes from providers and user accounts.
    op.drop_constraint('finbot_linked_accounts_provider_id_fkey', 'finbot_linked_accounts', type_='foreignkey')
    op.drop_constraint('finbot_linked_accounts_user_account_id_fkey', 'finbot_linked_accounts', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_linked_accounts', 'finbot_providers', ['provider_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_linked_accounts', 'finbot_user_accounts', ['user_account_id'], ['id'], ondelete='CASCADE')
    # finbot_linked_accounts_snapshots: follow both parent tables on delete.
    op.drop_constraint('finbot_linked_accounts_snapshots_linked_account_id_fkey', 'finbot_linked_accounts_snapshots', type_='foreignkey')
    op.drop_constraint('finbot_linked_accounts_snapshots_snapshot_id_fkey', 'finbot_linked_accounts_snapshots', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_linked_accounts_snapshots', 'finbot_linked_accounts', ['linked_account_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_linked_accounts_snapshots', 'finbot_user_accounts_snapshots', ['snapshot_id'], ['id'], ondelete='CASCADE')
    # finbot_linked_accounts_valuation_history_entries: snapshot/change refs
    # are nullable (SET NULL); ownership links cascade.
    op.drop_constraint('finbot_linked_accounts_valuation_his_effective_snapshot_id_fkey', 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_linked_accounts_valuation_history__history_entry_id_fkey', 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_linked_accounts_valuation_history_linked_account_id_fkey', 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_linked_accounts_valuation_histo_valuation_change_id_fkey', 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_linked_accounts_valuation_history_entries', 'finbot_user_accounts_snapshots', ['effective_snapshot_id'], ['id'], ondelete='SET NULL')
    op.create_foreign_key(None, 'finbot_linked_accounts_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_linked_accounts_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'], ondelete='SET NULL')
    op.create_foreign_key(None, 'finbot_linked_accounts_valuation_history_entries', 'finbot_linked_accounts', ['linked_account_id'], ['id'], ondelete='CASCADE')
    # finbot_sub_accounts_items_snapshot_entries.
    op.drop_constraint('finbot_sub_accounts_items_sna_sub_account_snapshot_entry_i_fkey', 'finbot_sub_accounts_items_snapshot_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_sub_accounts_items_snapshot_entries', 'finbot_sub_accounts_snapshot_entries', ['sub_account_snapshot_entry_id'], ['id'], ondelete='CASCADE')
    # finbot_sub_accounts_items_valuation_history_entries.
    op.drop_constraint('finbot_sub_accounts_items_valuation_hist_linked_account_id_fkey', 'finbot_sub_accounts_items_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_sub_accounts_items_valuation_histo_history_entry_id_fkey', 'finbot_sub_accounts_items_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_sub_accounts_items_valuation_hi_valuation_change_id_fkey', 'finbot_sub_accounts_items_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_sub_accounts_items_valuation_history_entries', 'finbot_linked_accounts', ['linked_account_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_sub_accounts_items_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_sub_accounts_items_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'], ondelete='SET NULL')
    # finbot_sub_accounts_snapshot_entries.
    op.drop_constraint('finbot_sub_accounts_snapshot__linked_account_snapshot_entr_fkey', 'finbot_sub_accounts_snapshot_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_sub_accounts_snapshot_entries', 'finbot_linked_accounts_snapshots', ['linked_account_snapshot_entry_id'], ['id'], ondelete='CASCADE')
    # finbot_sub_accounts_valuation_history_entries.
    op.drop_constraint('finbot_sub_accounts_valuation_history_en_linked_account_id_fkey', 'finbot_sub_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_sub_accounts_valuation_history_ent_history_entry_id_fkey', 'finbot_sub_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_sub_accounts_valuation_history__valuation_change_id_fkey', 'finbot_sub_accounts_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_sub_accounts_valuation_history_entries', 'finbot_linked_accounts', ['linked_account_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_sub_accounts_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_sub_accounts_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'], ondelete='SET NULL')
    # finbot_user_accounts_history_entries.
    op.drop_constraint('finbot_user_accounts_history_entries_user_account_id_fkey', 'finbot_user_accounts_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_user_accounts_history_entries_source_snapshot_id_fkey', 'finbot_user_accounts_history_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_user_accounts_history_entries', 'finbot_user_accounts', ['user_account_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_user_accounts_history_entries', 'finbot_user_accounts_snapshots', ['source_snapshot_id'], ['id'], ondelete='SET NULL')
    # finbot_user_accounts_settings.
    op.drop_constraint('finbot_user_accounts_settings_user_account_id_fkey', 'finbot_user_accounts_settings', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_user_accounts_settings', 'finbot_user_accounts', ['user_account_id'], ['id'], ondelete='CASCADE')
    # finbot_user_accounts_snapshots.
    op.drop_constraint('finbot_user_accounts_snapshots_user_account_id_fkey', 'finbot_user_accounts_snapshots', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_user_accounts_snapshots', 'finbot_user_accounts', ['user_account_id'], ['id'], ondelete='CASCADE')
    # finbot_user_accounts_valuation_history_entries.
    op.drop_constraint('finbot_user_accounts_valuation_history_en_history_entry_id_fkey', 'finbot_user_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint('finbot_user_accounts_valuation_history_valuation_change_id_fkey', 'finbot_user_accounts_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_user_accounts_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'finbot_user_accounts_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'], ondelete='SET NULL')
    # finbot_xccy_rates_snapshots.
    op.drop_constraint('finbot_xccy_rates_snapshots_snapshot_id_fkey', 'finbot_xccy_rates_snapshots', type_='foreignkey')
    op.create_foreign_key(None, 'finbot_xccy_rates_snapshots', 'finbot_user_accounts_snapshots', ['snapshot_id'], ['id'], ondelete='CASCADE')
    # ### end Alembic commands ###
def downgrade():
    """Revert :func:`upgrade`: restore the original foreign keys without
    any ON DELETE action, under their original auto-generated names.

    NOTE(review): the ``op.drop_constraint(None, ...)`` calls rely on Alembic
    being able to resolve the unnamed constraints created by ``upgrade``
    (typically via a MetaData naming convention) — confirm this downgrade
    actually runs against the target database before relying on it.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'finbot_xccy_rates_snapshots', type_='foreignkey')
    op.create_foreign_key('finbot_xccy_rates_snapshots_snapshot_id_fkey', 'finbot_xccy_rates_snapshots', 'finbot_user_accounts_snapshots', ['snapshot_id'], ['id'])
    op.drop_constraint(None, 'finbot_user_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_user_accounts_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key('finbot_user_accounts_valuation_history_valuation_change_id_fkey', 'finbot_user_accounts_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'])
    op.create_foreign_key('finbot_user_accounts_valuation_history_en_history_entry_id_fkey', 'finbot_user_accounts_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'])
    op.drop_constraint(None, 'finbot_user_accounts_snapshots', type_='foreignkey')
    op.create_foreign_key('finbot_user_accounts_snapshots_user_account_id_fkey', 'finbot_user_accounts_snapshots', 'finbot_user_accounts', ['user_account_id'], ['id'])
    op.drop_constraint(None, 'finbot_user_accounts_settings', type_='foreignkey')
    op.create_foreign_key('finbot_user_accounts_settings_user_account_id_fkey', 'finbot_user_accounts_settings', 'finbot_user_accounts', ['user_account_id'], ['id'])
    op.drop_constraint(None, 'finbot_user_accounts_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_user_accounts_history_entries', type_='foreignkey')
    op.create_foreign_key('finbot_user_accounts_history_entries_source_snapshot_id_fkey', 'finbot_user_accounts_history_entries', 'finbot_user_accounts_snapshots', ['source_snapshot_id'], ['id'])
    op.create_foreign_key('finbot_user_accounts_history_entries_user_account_id_fkey', 'finbot_user_accounts_history_entries', 'finbot_user_accounts', ['user_account_id'], ['id'])
    op.drop_constraint(None, 'finbot_sub_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_sub_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_sub_accounts_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key('finbot_sub_accounts_valuation_history__valuation_change_id_fkey', 'finbot_sub_accounts_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'])
    op.create_foreign_key('finbot_sub_accounts_valuation_history_ent_history_entry_id_fkey', 'finbot_sub_accounts_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'])
    op.create_foreign_key('finbot_sub_accounts_valuation_history_en_linked_account_id_fkey', 'finbot_sub_accounts_valuation_history_entries', 'finbot_linked_accounts', ['linked_account_id'], ['id'])
    op.drop_constraint(None, 'finbot_sub_accounts_snapshot_entries', type_='foreignkey')
    op.create_foreign_key('finbot_sub_accounts_snapshot__linked_account_snapshot_entr_fkey', 'finbot_sub_accounts_snapshot_entries', 'finbot_linked_accounts_snapshots', ['linked_account_snapshot_entry_id'], ['id'])
    op.drop_constraint(None, 'finbot_sub_accounts_items_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_sub_accounts_items_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_sub_accounts_items_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key('finbot_sub_accounts_items_valuation_hi_valuation_change_id_fkey', 'finbot_sub_accounts_items_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'])
    op.create_foreign_key('finbot_sub_accounts_items_valuation_histo_history_entry_id_fkey', 'finbot_sub_accounts_items_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'])
    op.create_foreign_key('finbot_sub_accounts_items_valuation_hist_linked_account_id_fkey', 'finbot_sub_accounts_items_valuation_history_entries', 'finbot_linked_accounts', ['linked_account_id'], ['id'])
    op.drop_constraint(None, 'finbot_sub_accounts_items_snapshot_entries', type_='foreignkey')
    op.create_foreign_key('finbot_sub_accounts_items_sna_sub_account_snapshot_entry_i_fkey', 'finbot_sub_accounts_items_snapshot_entries', 'finbot_sub_accounts_snapshot_entries', ['sub_account_snapshot_entry_id'], ['id'])
    op.drop_constraint(None, 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.drop_constraint(None, 'finbot_linked_accounts_valuation_history_entries', type_='foreignkey')
    op.create_foreign_key('finbot_linked_accounts_valuation_histo_valuation_change_id_fkey', 'finbot_linked_accounts_valuation_history_entries', 'finbot_valuation_change_entries', ['valuation_change_id'], ['id'])
    op.create_foreign_key('finbot_linked_accounts_valuation_history_linked_account_id_fkey', 'finbot_linked_accounts_valuation_history_entries', 'finbot_linked_accounts', ['linked_account_id'], ['id'])
    op.create_foreign_key('finbot_linked_accounts_valuation_history__history_entry_id_fkey', 'finbot_linked_accounts_valuation_history_entries', 'finbot_user_accounts_history_entries', ['history_entry_id'], ['id'])
    op.create_foreign_key('finbot_linked_accounts_valuation_his_effective_snapshot_id_fkey', 'finbot_linked_accounts_valuation_history_entries', 'finbot_user_accounts_snapshots', ['effective_snapshot_id'], ['id'])
    op.drop_constraint(None, 'finbot_linked_accounts_snapshots', type_='foreignkey')
    op.drop_constraint(None, 'finbot_linked_accounts_snapshots', type_='foreignkey')
    op.create_foreign_key('finbot_linked_accounts_snapshots_snapshot_id_fkey', 'finbot_linked_accounts_snapshots', 'finbot_user_accounts_snapshots', ['snapshot_id'], ['id'])
    op.create_foreign_key('finbot_linked_accounts_snapshots_linked_account_id_fkey', 'finbot_linked_accounts_snapshots', 'finbot_linked_accounts', ['linked_account_id'], ['id'])
    op.drop_constraint(None, 'finbot_linked_accounts', type_='foreignkey')
    op.drop_constraint(None, 'finbot_linked_accounts', type_='foreignkey')
    op.create_foreign_key('finbot_linked_accounts_user_account_id_fkey', 'finbot_linked_accounts', 'finbot_user_accounts', ['user_account_id'], ['id'])
    op.create_foreign_key('finbot_linked_accounts_provider_id_fkey', 'finbot_linked_accounts', 'finbot_providers', ['provider_id'], ['id'])
    # ### end Alembic commands ###
| 119.731092
| 221
| 0.828186
| 1,832
| 14,248
| 5.801856
| 0.043668
| 0.089566
| 0.101609
| 0.0779
| 0.97234
| 0.971964
| 0.971964
| 0.971964
| 0.965001
| 0.946373
| 0
| 0.004484
| 0.060851
| 14,248
| 118
| 222
| 120.745763
| 0.789851
| 0.021477
| 0
| 0.18
| 0
| 0
| 0.657965
| 0.54484
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.02
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
92fddb1118add96458c0534dd1ab74362605bba5
| 19,030
|
py
|
Python
|
tests/test_vis/test_vis_smpl.py
|
ttxskk/mmhuman3d
|
f6d39e24a2d5cc216448fc3bd82832ff45eee436
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vis/test_vis_smpl.py
|
ttxskk/mmhuman3d
|
f6d39e24a2d5cc216448fc3bd82832ff45eee436
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vis/test_vis_smpl.py
|
ttxskk/mmhuman3d
|
f6d39e24a2d5cc216448fc3bd82832ff45eee436
|
[
"Apache-2.0"
] | null | null | null |
import subprocess
import numpy as np
import pytest
import torch
from mmhuman3d.core.visualization import (
visualize_smpl_calibration,
visualize_smpl_hmr,
visualize_smpl_pose,
visualize_smpl_vibe,
visualize_T_pose,
)
from mmhuman3d.utils.ffmpeg_utils import (
array_to_images,
array_to_video,
video_to_array,
)
# Directory holding the body model weight files used by every test below.
model_path = 'data/body_models'
def test_visualize_smpl_pose():
    """Exercise the SMPL/SMPL-X visualization entry points end to end.

    Covers error handling (bad pose shapes, bad dict keys, bad output and
    model paths, mismatched batch sizes, bad palettes) and the success paths
    of visualize_smpl_pose, visualize_smpl_vibe, visualize_T_pose,
    visualize_smpl_calibration and visualize_smpl_hmr, checking the written
    video's array shape after each successful render.

    NOTE(review): writes to fixed paths under /tmp; runs are not isolated
    from each other or from concurrent test sessions.
    """
    # Prefer GPU when available; all calls accept a torch device string.
    if torch.cuda.is_available():
        device_name = 'cuda:0'
    else:
        device_name = 'cpu'
    # wrong input shape
    with pytest.raises(ValueError):
        visualize_smpl_pose(
            poses=torch.zeros(2, 71),
            model_type='smpl',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            render_choice='hq',
            resolution=(128, 128),
            overwrite=True,
            device=device_name)
    with pytest.raises(ValueError):
        visualize_smpl_pose(
            poses=torch.zeros(2, 164),
            model_type='smplx',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            render_choice='hq',
            overwrite=True,
            device=device_name)
    # Dict input with a wrongly-sized body_pose (68 instead of 69).
    with pytest.raises(RuntimeError):
        pose_dict = {
            'body_pose': torch.zeros(2, 68),
            'global_orient': torch.zeros(2, 3)
        }
        visualize_smpl_pose(
            poses=pose_dict,
            model_type='smpl',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            render_choice='hq',
            overwrite=True,
            device=device_name)
    # SMPL-X dict with a wrongly-sized body_pose (64 instead of 63).
    with pytest.raises(RuntimeError):
        pose_dict = {
            'body_pose': torch.zeros(2, 64),
            'global_orient': torch.zeros(2, 3),
            'left_hand_pose': torch.zeros(2, 45),
            'right_hand_pose': torch.zeros(2, 45),
            'jaw_pose': torch.zeros(2, 3),
            'leye_pose': torch.zeros(2, 3),
            'reye_pose': torch.zeros(2, 3),
        }
        visualize_smpl_pose(
            poses=pose_dict,
            model_type='smplx',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            render_choice='hq',
            overwrite=True,
            device=device_name)
    # wrong input keys
    with pytest.raises(KeyError):
        pose_dict = {
            'wrong_smpl_name': torch.zeros(2, 69),
            'global_orient': torch.zeros(2, 3)
        }
        visualize_smpl_pose(
            poses=pose_dict,
            model_type='smpl',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            render_choice='hq',
            overwrite=True,
            device=device_name)
    with pytest.raises(KeyError):
        pose_dict = {
            'wrong_smplx_name': torch.zeros(2, 63),
            'global_orient': torch.zeros(2, 3),
            'left_hand_pose': torch.zeros(2, 45),
            'right_hand_pose': torch.zeros(2, 45),
            'jaw_pose': torch.zeros(2, 3),
            'leye_pose': torch.zeros(2, 3),
            'reye_pose': torch.zeros(2, 3),
        }
        visualize_smpl_pose(
            poses=pose_dict,
            model_type='smplx',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            render_choice='hq',
            overwrite=True,
            device=device_name)
    # wrong output path
    with pytest.raises(FileExistsError):
        # Pre-create the output file so overwrite=False must fail.
        v = np.zeros((3, 512, 512, 3))
        array_to_video(v, output_path='/tmp/1.mp4')
        visualize_smpl_pose(
            poses=torch.zeros(2, 72),
            model_type='smpl',
            output_path='/tmp/1.mp4',
            model_path=model_path,
            resolution=(128, 128),
            render_choice='hq',
            overwrite=False,
            device=device_name)
    # wrong body model weight path
    with pytest.raises(FileNotFoundError):
        command = ['touch', '/tmp/1.mp4']
        subprocess.call(command)
        visualize_smpl_pose(
            poses=torch.zeros(2, 72),
            model_type='smpl',
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            model_path='/312',
            render_choice='hq',
            overwrite=True,
            device=device_name)
    # Existing directory that does not contain model weights.
    with pytest.raises(AssertionError):
        command = ['touch', '/tmp/1.mp4']
        subprocess.call(command)
        visualize_smpl_pose(
            poses=torch.zeros(2, 72),
            model_type='smpl',
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            model_path='/tmp',
            render_choice='hq',
            overwrite=True,
            device=device_name)
    with pytest.raises(FileNotFoundError):
        command = ['touch', '/tmp/1.mp4']
        subprocess.call(command)
        visualize_smpl_pose(
            poses=torch.zeros(2, 72),
            model_type='smpl',
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            model_path='/123',
            render_choice='hq',
            overwrite=True,
            device=device_name)
    # Success paths: render and check the output video's array shape.
    visualize_smpl_pose(
        poses=torch.zeros(1, 72),
        model_type='smpl',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (1, 48, 48, 3)
    # Multi-person poses (1, 2, 72) with shared betas.
    visualize_smpl_pose(
        poses=torch.zeros(1, 2, 72),
        model_type='smpl',
        betas=torch.zeros(1, 10),
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(128, 128),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (1, 128, 128, 3)
    visualize_smpl_pose(
        poses=torch.zeros(1, 2, 72),
        betas=torch.zeros(1, 2, 10),
        model_type='smpl',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(128, 128),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (1, 128, 128, 3)
    # Mismatched person counts between poses and betas/transl must raise.
    with pytest.raises(ValueError):
        visualize_smpl_pose(
            poses=torch.zeros(1, 3, 72),
            betas=torch.zeros(1, 2, 10),
            model_type='smpl',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            overwrite=True,
            device=device_name)
    with pytest.raises(ValueError):
        visualize_smpl_pose(
            poses=torch.zeros(1, 3, 72),
            transl=torch.zeros(1, 2, 3),
            model_type='smpl',
            model_path=model_path,
            output_path='/tmp/1.mp4',
            resolution=(128, 128),
            overwrite=True,
            device=device_name)
    visualize_smpl_pose(
        poses=torch.zeros(1, 2, 72),
        betas=torch.zeros(1, 3, 10),
        transl=torch.zeros(1, 3, 3),
        model_type='smpl',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(128, 128),
        overwrite=True,
        device=device_name)
    visualize_smpl_pose(
        poses=torch.zeros(10, 72),
        betas=torch.zeros(1, 10),
        model_type='smpl',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(128, 128),
        overwrite=True,
        device=device_name)
    visualize_smpl_pose(
        poses=torch.zeros(1, 165),
        model_type='smplx',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (1, 48, 48, 3)
    visualize_smpl_pose(
        poses=torch.zeros(1, 72),
        model_type='smpl',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (1, 48, 48, 3)
    # Dict input, correct shapes.
    pose_dict = {
        'body_pose': torch.zeros(2, 69),
        'global_orient': torch.zeros(2, 3)
    }
    visualize_smpl_pose(
        poses=pose_dict,
        model_type='smpl',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    pose_dict = {
        'body_pose': torch.zeros(2, 63),
        'global_orient': torch.zeros(2, 3),
        'left_hand_pose': torch.zeros(2, 45),
        'right_hand_pose': torch.zeros(2, 45),
        'jaw_pose': torch.zeros(2, 3),
        'leye_pose': torch.zeros(2, 3),
        'reye_pose': torch.zeros(2, 3),
    }
    visualize_smpl_pose(
        poses=pose_dict,
        model_type='smplx',
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    # VIBE-style input: predicted cameras plus per-frame bounding boxes.
    pred_cam = torch.ones(10, 4)
    bbox = torch.tensor([0, 0, 100, 100]).view(1, 4).repeat(10, 1)
    visualize_smpl_vibe(
        poses=pose_dict,
        model_type='smplx',
        pred_cam=pred_cam,
        bbox=bbox,
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    # T-pose turntable renders, cycling through every render_choice.
    visualize_T_pose(
        num_frames=2,
        model_type='smplx',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        output_path='/tmp/1.mp4',
        palette='segmentation',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='normal',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='depth',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='pointcloud',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='silhouette',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='part_silhouette',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='mq',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='lq',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    visualize_T_pose(
        num_frames=2,
        model_type='smpl',
        orbit_speed=(1.0, 0.5),
        model_path=model_path,
        render_choice='hq',
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        batch_size=5,
        overwrite=True,
        device=device_name)
    assert video_to_array('/tmp/1.mp4').shape == (2, 48, 48, 3)
    # Calibration path: explicit intrinsics K and extrinsics R, T.
    K = torch.zeros(1, 4, 4)
    K[:, 0, 0] = 1
    K[:, 1, 1] = 1
    K[:, 0, 2] = 1
    K[:, 1, 2] = 1
    R = torch.eye(3, 3)[None]
    T = torch.zeros(1, 3)
    visualize_smpl_calibration(
        poses=pose_dict,
        model_type='smplx',
        model_path=model_path,
        K=K,
        R=R,
        T=T,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    K = torch.zeros(1, 4, 4)
    K[:, 0, 0] = 1
    K[:, 1, 1] = 1
    K[:, 0, 2] = 1
    K[:, 1, 2] = 1
    R = torch.eye(3, 3)[None]
    T = torch.zeros(1, 3)
    betas = torch.zeros(2, 10)
    transl = torch.zeros(2, 3)
    visualize_smpl_calibration(
        poses=pose_dict,
        model_type='smplx',
        model_path=model_path,
        betas=betas,
        transl=transl,
        K=K,
        R=R,
        T=T,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    # HMR path: bbox plus camera translation, poses or vertices as input.
    bbox = np.zeros((3, 1, 4))
    cam_transl = torch.zeros(3, 1, 3)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=bbox,
        cam_transl=cam_transl,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    bbox = np.zeros((3, 1, 4))
    cam_transl = torch.zeros(3, 1, 3)
    visualize_smpl_hmr(
        verts=torch.zeros(3, 6890, 3),
        model_type='smpl',
        model_path=model_path,
        bbox=bbox,
        cam_transl=cam_transl,
        output_path='/tmp/1.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    bbox = np.zeros((3, 1, 4))
    T = torch.zeros(3, 1, 3)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=bbox,
        cam_transl=cam_transl,
        origin_frames='/tmp/1.mp4',
        output_path='/tmp/2.mp4',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    # Background frames written as images; consumed via frame_list / folder.
    image_array = np.random.randint(
        low=0, high=255, size=(3, 128, 128, 3), dtype=np.uint8)
    array_to_images(image_array, '/tmp/temp_images', img_format='%06d.png')
    bbox = np.zeros((3, 1, 4))
    cam_transl = torch.zeros(3, 1, 3)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=bbox,
        cam_transl=cam_transl,
        output_path='/tmp/1.mp4',
        frame_list=['/tmp/temp_images/%06d.png' % 0] * 3,
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    bbox = np.zeros((3, 1, 4))
    cam_transl = torch.zeros(3, 1, 3)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=bbox,
        cam_transl=cam_transl,
        output_path='/tmp/1.mp4',
        img_format='%06d.png',
        origin_frames='/tmp/temp_images',
        resolution=(48, 48),
        overwrite=True,
        device=device_name)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=bbox,
        cam_transl=cam_transl,
        output_path='/tmp/1.mp4',
        origin_frames='/tmp/temp_images',
        img_format='%06d.png',
        resolution=(48, 48),
        overwrite=True,
        palette=np.ones((1, 3)),
        device=device_name)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=np.zeros((3, 3, 4)),
        cam_transl=torch.zeros(3, 3, 3),
        output_path='/tmp/1.mp4',
        origin_frames='/tmp/temp_images',
        img_format='%06d.png',
        resolution=(128, 128),
        overwrite=True,
        palette=np.ones((1, 3)),
        device=device_name)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=np.zeros((3, 3, 4)),
        cam_transl=torch.zeros(3, 3, 3),
        output_path='/tmp/1.mp4',
        origin_frames='/tmp/temp_images',
        img_format='%06d.png',
        resolution=(128, 128),
        overwrite=True,
        mesh_file_path='/tmp',
        palette=np.ones((1, 3)),
        device=device_name)
    visualize_smpl_hmr(
        poses=torch.zeros(3, 3, 165),
        model_type='smplx',
        model_path=model_path,
        bbox=np.zeros((3, 3, 4)),
        cam_transl=torch.zeros(3, 3, 3),
        output_path='/tmp/1.mp4',
        origin_frames='/tmp/temp_images',
        img_format='%06d.png',
        resolution=(128, 128),
        overwrite=True,
        mesh_file_path='/tmp',
        palette='random',
        device=device_name)
    # Invalid palettes must raise.
    with pytest.raises(ValueError):
        visualize_smpl_hmr(
            poses=torch.zeros(3, 3, 165),
            model_type='smplx',
            model_path=model_path,
            bbox=np.zeros((3, 3, 4)),
            cam_transl=torch.zeros(3, 3, 3),
            output_path='/tmp/1.mp4',
            origin_frames='/tmp/temp_images',
            img_format='%06d.png',
            resolution=(128, 128),
            overwrite=True,
            mesh_file_path='/tmp',
            palette='wrong_palette',
            device=device_name)
    with pytest.raises(ValueError):
        visualize_smpl_hmr(
            poses=torch.zeros(3, 3, 165),
            model_type='smplx',
            model_path=model_path,
            bbox=np.zeros((3, 3, 4)),
            cam_transl=torch.zeros(3, 3, 3),
            output_path='/tmp/1.mp4',
            origin_frames='/tmp/temp_images',
            img_format='%06d.png',
            resolution=(128, 128),
            overwrite=True,
            mesh_file_path='/tmp',
            palette=None,
            device=device_name)
| 29.549689
| 75
| 0.548975
| 2,419
| 19,030
| 4.096321
| 0.062009
| 0.079927
| 0.047331
| 0.063579
| 0.896155
| 0.889999
| 0.882733
| 0.870118
| 0.864265
| 0.859623
| 0
| 0.06881
| 0.313452
| 19,030
| 643
| 76
| 29.595645
| 0.689629
| 0.004256
| 0
| 0.872483
| 0
| 0
| 0.083245
| 0.00132
| 0
| 0
| 0
| 0
| 0.031879
| 1
| 0.001678
| false
| 0
| 0.010067
| 0
| 0.011745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13496c675e6d0fae2515fbb449a40d25a10582e6
| 21,365
|
py
|
Python
|
src/autocorrelation_accl.py
|
FAU-LS12-RC/Finding-COs-in-Side-Channel-Traces
|
fcd93980b55e621a267bd75c04612d25ccb45282
|
[
"Apache-2.0"
] | 3
|
2021-11-10T19:38:58.000Z
|
2022-03-31T10:53:19.000Z
|
src/autocorrelation_accl.py
|
FAU-LS12-RC/Finding-COs-on-Side-Channel-Traces
|
fcd93980b55e621a267bd75c04612d25ccb45282
|
[
"Apache-2.0"
] | null | null | null |
src/autocorrelation_accl.py
|
FAU-LS12-RC/Finding-COs-on-Side-Channel-Traces
|
fcd93980b55e621a267bd75c04612d25ccb45282
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
from __future__ import absolute_import
from __future__ import print_function
# sure imports
import numpy as np
import scipy
import pyopencl as cl
from src.helper import printProgressBar, top_x_array, Plotter, detrending_filter, autocorr_loop
# open-cl stuff:
class Autocorrelation_Accelerator:
def __init__(self, data=None, no_similar_rounds=None, top_x=10, do_plots=False, use_detrended=False, hidden_aes_operations=33, trace_container=None):
self.data = data
self.no_similar_rounds = no_similar_rounds
self.top_x = top_x
self.do_plots = do_plots
self.use_detrended = use_detrended
self.hidden_aes_operations = hidden_aes_operations
self.trace_container = trace_container
def calc_sad(self, template_candidate, trace, idx_list=[]):
# insert code here :)
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)
mf = cl.mem_flags
# setup memory:
trace_host = trace.astype(np.float32)
trace_dev = cl.Buffer(ctx, mf.READ_ONLY |
mf.COPY_HOST_PTR, hostbuf=trace_host)
template_candidate_host = template_candidate.astype(np.float32)
template_candidate_dev = cl.Buffer(
ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=template_candidate_host)
correlation_dev = cl.Buffer(ctx, mf.WRITE_ONLY, trace_host.nbytes)
template_length_dev = cl.Buffer(
ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=np.int32(len(template_candidate)))
programstring = """
#if __OPENCL_VERSION__ < 120
#if cl_khr_fp64
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
#elif cl_amd_fp64
#pragma OPENCL EXTENSION cl_amd_fp64 : enable
#else
#error Missing double precision extension
#endif
#endif
float sad_calculation(__global const float* X, __global const float* avg_segment_adj, int n){
float sad = 0;
for (int i = 0; i < n; ++i){
sad += fabs((float)(avg_segment_adj[i]-X[i]));
}
return sad;
}
__kernel void correlate(__global const float *data, __global float *correlation, __global float *template_candidate, __global int *template_length){
int i = get_global_id(0);
int idx_max = get_global_size(0);
int i_template_length = *template_length;
// Abort if we would otherwise run out of valid idx
if(i+i_template_length >= idx_max){
correlation[i] = -1;
return;
}
correlation[i] = sad_calculation(&data[i], template_candidate, i_template_length);
}
"""
prg = cl.Program(ctx, programstring).build()
prg.correlate(queue, trace_host.shape, None, trace_dev,
correlation_dev, template_candidate_dev, template_length_dev)
correlation_host = np.empty_like(trace_host)
cl.enqueue_copy(queue, correlation_host, correlation_dev)
#Plotter(range(len(correlation_host)),correlation_host,"Sample","Correlation","Correlation of right width (autocorr)","correlation_host")
#Plotter(range(len(correlation_host_detrended)),correlation_host_detrended,"Sample","Correlation detrended","Correlation detrended of right width (autocorr)","correlation_host")
# print(correlation_host)
# print(len(correlation_host))
#print("max = " + str(max(correlation_host)))
if len(idx_list) == 0:
idx_list = np.array(range(len(trace)))
else:
idx_list = np.array(idx_list)
return correlation_host[idx_list[np.where(idx_list < len(correlation_host)-len(template_candidate))]]
def correlate(self, trace, template_candidate, idx_list, opencl=True, print_times=True):
from time import process_time
t1_start = process_time()
if not opencl:
correlation = np.array([np.abs(scipy.stats.pearsonr(
template_candidate, trace[idx:idx+len(template_candidate)])[0]) for idx in idx_list])
else:
# insert code here :)
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)
mf = cl.mem_flags
# setup memory:
trace_host = trace.astype(np.float32)
trace_dev = cl.Buffer(ctx, mf.READ_ONLY |
mf.COPY_HOST_PTR, hostbuf=trace_host)
template_candidate_host = template_candidate.astype(np.float32)
template_candidate_dev = cl.Buffer(
ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=template_candidate_host)
correlation_dev = cl.Buffer(ctx, mf.WRITE_ONLY, trace_host.nbytes)
template_length_dev = cl.Buffer(
ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=np.int32(len(template_candidate)))
programstring = """
#if __OPENCL_VERSION__ < 120
#if cl_khr_fp64
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
#elif cl_amd_fp64
#pragma OPENCL EXTENSION cl_amd_fp64 : enable
#else
#error Missing double precision extension
#endif
#endif
float correlationCoefficient(__global const float* X, __global const float* avg_segment_adj, int n){
float sum_X = 0, sum_Y = 0, sum_XY = 0;
float squareSum_X = 0, squareSum_Y = 0;
for (int i = 0; i < n; ++i){
sum_X = sum_X + X[i];
sum_Y = sum_Y + avg_segment_adj[i];
sum_XY = sum_XY + X[i] * avg_segment_adj[i];
squareSum_X = squareSum_X + X[i] * X[i];
squareSum_Y = squareSum_Y + avg_segment_adj[i] * avg_segment_adj[i];
}
float corr = (float)(n * sum_XY - sum_X * sum_Y) / sqrt((float)((n * squareSum_X - sum_X * sum_X) * (n * squareSum_Y - sum_Y * sum_Y)));
return corr;
}
__kernel void correlate(__global const float *data, __global float *correlation, __global float *template_candidate, __global int *template_length){
int i = get_global_id(0);
int idx_max = get_global_size(0);
int i_template_length = *template_length;
// Abort if we would otherwise run out of valid idx
if(i+i_template_length >= idx_max){
correlation[i] = 0;
return;
}
correlation[i] = correlationCoefficient(&data[i], template_candidate, i_template_length);
}
"""
prg = cl.Program(ctx, programstring).build()
prg.correlate(queue, trace_host.shape, None, trace_dev,
correlation_dev, template_candidate_dev, template_length_dev)
correlation_host = np.empty_like(trace_host)
cl.enqueue_copy(queue, correlation_host, correlation_dev)
#Plotter(range(len(correlation_host)),correlation_host,"Sample","Correlation","Correlation of right width (autocorr)","correlation_host")
#Plotter(range(len(correlation_host_detrended)),correlation_host_detrended,"Sample","Correlation detrended","Correlation detrended of right width (autocorr)","correlation_host")
# print(correlation_host)
# print(len(correlation_host))
#print("max = " + str(max(correlation_host)))
correlation = np.abs(correlation_host)[idx_list]
t1_stop = process_time()
if print_times:
if opencl:
print("GPU: correlate used " +
str((t1_stop-t1_start)) + " seconds")
else:
print("CPU: correlate used " +
str((t1_stop-t1_start)) + " seconds")
return correlation
def autocorrelation_accelerated_updated(self, w_list):
# go through all possible widhts to determine the best one! (w = perfect segment width)
correlation_for_each_width = []
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)
mf = cl.mem_flags
#data_dev = cl_array.to_device(queue, self.data)
data_host = self.data.astype(np.float32)
correlation_dev = cl.Buffer(ctx, mf.WRITE_ONLY, data_host.nbytes)
data_dev = cl.Buffer(ctx, mf.READ_ONLY |
mf.COPY_HOST_PTR, hostbuf=data_host)
no_similar_rounds_dev = cl.Buffer(
ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=np.int32(self.no_similar_rounds))
widths_correlation = []
for w in w_list:
programstring = "#define WIDTH "+str(w)+"""
#if __OPENCL_VERSION__ < 120
#if cl_khr_fp64
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
#elif cl_amd_fp64
#pragma OPENCL EXTENSION cl_amd_fp64 : enable
#else
#error Missing double precision extension
#endif
#endif
float correlationCoefficient_stable(__global const float* X, __private const float* avg_segment_adj, int n){
float sum_X = 0, sum_Y = 0, sum_XY = 0;
float squareSum_X = 0, squareSum_Y = 0;
for (int i = 0; i < n; ++i){
sum_X = sum_X + X[i];
sum_Y = sum_Y + avg_segment_adj[i];
sum_XY = sum_XY + X[i] * avg_segment_adj[i];
squareSum_X = squareSum_X + X[i] * X[i];
squareSum_Y = squareSum_Y + avg_segment_adj[i] * avg_segment_adj[i];
}
float corr = (float)(n * sum_XY - sum_X * sum_Y) / sqrt((float)((n * squareSum_X - sum_X * sum_X) * (n * squareSum_Y - sum_Y * sum_Y))+0.00001);
return corr;
}
__kernel void correlate(__global const float *data, __global float *correlation, __global int *no_similar_rounds){
int i = get_global_id(0);
int idx_max = get_global_size(0);
float avg_segment[WIDTH]= { 0 };
int w = WIDTH;
int max_rounds = *no_similar_rounds;
// Abort if we would otherwise run out of valid idx
if(i+max_rounds*w >= idx_max){
correlation[i] = 0;
return;
}
// Create mean segment:
for(int round = 0; round < max_rounds; ++round){
for(int avg_idx=0; avg_idx<w; ++avg_idx){
avg_segment[avg_idx] += data[i+avg_idx+(round*w)]/max_rounds;
}
}
//find avg correlation:
float avg_correlation = 0;
for(int round = 0; round < max_rounds; ++round){
float round_correlation = correlationCoefficient_stable(&data[i+round*w],avg_segment,w);
avg_correlation += round_correlation/max_rounds;
}
correlation[i] = avg_correlation;
}
"""
prg = cl.Program(ctx, programstring).build()
len_all_rounds = w*self.no_similar_rounds
if(len_all_rounds > len(data_host)):
break
# all start positions need to be considered!
prg.correlate(queue, data_host.shape, None, data_dev,
correlation_dev, no_similar_rounds_dev)
correlation_host = np.empty_like(data_host)
cl.enqueue_copy(queue, correlation_host, correlation_dev)
if self.use_detrended:
correlation_host_detrended = detrending_filter(
correlation_host, w*self.no_similar_rounds)
if self.do_plots:
if not self.use_detrended:
Plotter(range(len(correlation_host)), np.array(correlation_host, dtype=float), "Sample", "Similarity",
"Similarity of width" + str(w) + " (autocorr)", "Similarity (Step 1) for width: " + str(w), decimation_factor=30)
else:
Plotter(range(len(correlation_host_detrended)), np.array(correlation_host_detrended, dtype=float), "Sample", "Similarity detrended",
"Correlation detrended of width " + str(w) + " (autocorr)", "correlation_host_detrended " + str(w), decimation_factor=30)
self.trace_container.quality_plot = correlation_host
if self.use_detrended:
add_len = len(correlation_host) - \
len(correlation_host_detrended)
correlation_host = np.concatenate(
(correlation_host_detrended, np.zeros(add_len)))
correlation_for_each_width.append(correlation_host)
top_x_correlation = top_x_array(
np.array(correlation_host), self.top_x, scale=1)
widths_correlation.append(top_x_correlation)
printProgressBar(len(widths_correlation), len(w_list))
print("\n")
print("-------AUTOCORR-------RESULTS-------------------------------")
widths_correlation = np.array(widths_correlation)
# np.save("widths_correlation_savepoint",widths_correlation)
best_widths = top_x_array(widths_correlation[:, 0, 0], 100)
filtered_best_widths = []
for item in best_widths:
if item[0] > 0.:
filtered_best_widths.append(item)
best_widths = np.array(filtered_best_widths)
return best_widths, widths_correlation, correlation_for_each_width
def autocorrelation_accelerated_knownWidth(self, w):
# go through all possible widhts to determine the best one! (w = perfect segment width)
w_list = np.array([w])
best_widths, widths_correlation, correlation_for_each_width = self.autocorrelation_accelerated_updated(
w_list)
return widths_correlation[0], correlation_for_each_width[0]
def autocorr_cpu(self, w_list):
# go through all possible widhts to determine the best one! (w = perfect segment width)
widths_correlation = []
for w in w_list:
printProgressBar(np.where(w_list == w)[0][0], len(w_list)-1)
len_all_rounds = w*self.no_similar_rounds
if(len_all_rounds > len(self.data)):
break
# all start positions need to be considered!
correlation_list = autocorr_loop(
len_all_rounds, w, self.data, self.no_similar_rounds)
# found best correlation spot --> how many peaks do we get here?!
print(len(correlation_list))
top_x_correlation = top_x_array(
np.array(correlation_list), self.top_x, scale=1)
widths_correlation.append(top_x_correlation)
#correlation_list_detrended = detrending_filter(correlation_list[:len(correlation_list)-w*self.trace_container.no_similar_rounds],w*self.trace_container.no_similar_rounds*2)
#Plotter(range(len(correlation_list)),correlation_list,"Sample","Correlation","Correlation of right width (autocorr)","correlation_host")
#Plotter(range(len(correlation_list_detrended)),correlation_list_detrended,"Sample","Correlation detrended","Correlation detrended of right width (autocorr)","correlation_host")
print("\n")
print("--------------RESULTS-------------------------------")
widths_correlation = np.array(widths_correlation)
best_widths = top_x_array(widths_correlation[:, 0, 0], 20)
return best_widths, widths_correlation
def autosad_accelerated_updated(self, w_list):
# go through all possible widhts to determine the best one! (w = perfect segment width)
correlation_for_each_width = []
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)
mf = cl.mem_flags
#data_dev = cl_array.to_device(queue, self.data)
data_host = self.data.astype(np.float32)
correlation_dev = cl.Buffer(ctx, mf.WRITE_ONLY, data_host.nbytes)
data_dev = cl.Buffer(ctx, mf.READ_ONLY |
mf.COPY_HOST_PTR, hostbuf=data_host)
no_similar_rounds_dev = cl.Buffer(
ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=np.int32(self.no_similar_rounds))
widths_correlation = []
for w in w_list:
programstring = "#define WIDTH "+str(w)+"""
#if __OPENCL_VERSION__ < 120
#if cl_khr_fp64
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
#elif cl_amd_fp64
#pragma OPENCL EXTENSION cl_amd_fp64 : enable
#else
#error Missing double precision extension
#endif
#endif
float negative_sad_calculation(__global const float* X, __private const float* avg_segment_adj, int n){
float sad = 0;
for (int i = 0; i < n; ++i){
sad -= fabs((float)(avg_segment_adj[i]-X[i]));
}
return sad;
}
__kernel void correlate(__global const float *data, __global float *correlation, __global int *no_similar_rounds){
int i = get_global_id(0);
int idx_max = get_global_size(0);
float avg_segment[WIDTH]= { 0 };
int w = WIDTH;
int max_rounds = *no_similar_rounds;
// Abort if we would otherwise run out of valid idx
if(i+max_rounds*w >= idx_max){
correlation[i] = -FLT_MAX;
return;
}
// Create mean segment:
for(int round = 0; round < max_rounds; ++round){
for(int avg_idx=0; avg_idx<w; ++avg_idx){
avg_segment[avg_idx] += data[i+avg_idx+(round*w)]/max_rounds;
}
}
//find avg correlation:
float avg_correlation = 0;
for(int round = 0; round < max_rounds; ++round){
float round_correlation = negative_sad_calculation(&data[i+round*w],avg_segment,w);
avg_correlation += round_correlation/max_rounds;
}
correlation[i] = avg_correlation;
}
"""
prg = cl.Program(ctx, programstring).build()
len_all_rounds = w*self.no_similar_rounds
if(len_all_rounds > len(data_host)):
break
# all start positions need to be considered!
prg.correlate(queue, data_host.shape, None, data_dev,
correlation_dev, no_similar_rounds_dev)
correlation_host = np.empty_like(data_host)
cl.enqueue_copy(queue, correlation_host, correlation_dev)
correlation_host_detrended = detrending_filter(
correlation_host[:len(correlation_host)-w*self.no_similar_rounds], w)
if self.do_plots:
Plotter(range(len(correlation_host[:int(len(correlation_host)-w*self.no_similar_rounds)])), np.array(correlation_host[:int(len(correlation_host)-w *
self.no_similar_rounds)], dtype=float), "Sample", "Correlation", "Correlation of right width (autocorr)", "correlation_host", decimation_factor=10)
Plotter(range(len(correlation_host_detrended)), np.array(correlation_host_detrended, dtype=float), "Sample",
"Correlation detrended", "Correlation detrended of right width (autocorr)", "correlation_host_detrended", decimation_factor=10)
if self.use_detrended:
add_len = len(correlation_host) - \
len(correlation_host_detrended)
correlation_host = np.concatenate(
(correlation_host_detrended, np.zeros(add_len)))
else:
correlation_host = correlation_host / \
correlation_host.max(axis=0)
correlation_for_each_width.append(correlation_host)
top_x_correlation = top_x_array(
np.array(correlation_host), self.top_x, scale=1)
print("top x correlation: " + str(top_x_correlation))
widths_correlation.append(top_x_correlation)
printProgressBar(len(widths_correlation), len(w_list))
print("\n")
print("-------AUTOSAD-------RESULTS-------------------------------")
widths_correlation = np.array(widths_correlation)
# np.save("widths_correlation_savepoint",widths_correlation)
best_widths = top_x_array(widths_correlation[:, 0, 0], 100)
filtered_best_widths = []
for item in best_widths:
if item[0] > 0.:
filtered_best_widths.append(item)
best_widths = np.array(filtered_best_widths)
return best_widths, widths_correlation, correlation_for_each_width
| 47.477778
| 189
| 0.589562
| 2,467
| 21,365
| 4.793677
| 0.094447
| 0.082445
| 0.029173
| 0.016574
| 0.826484
| 0.802638
| 0.799425
| 0.777693
| 0.760781
| 0.751903
| 0
| 0.010717
| 0.314346
| 21,365
| 449
| 190
| 47.583519
| 0.796573
| 0.101287
| 0
| 0.709859
| 0
| 0.011268
| 0.423705
| 0.040639
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019718
| false
| 0
| 0.019718
| 0
| 0.070423
| 0.047887
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13c1eb762343d9e50ac1221373298592a1177eb6
| 530
|
py
|
Python
|
smartcontract/venv/lib/python3.6/site-packages/boa_test/example/BinopTest.py
|
simplitech/neoinvoice
|
bc9a0217858938b49f99fef13b3439f4a537a5f5
|
[
"MIT"
] | null | null | null |
smartcontract/venv/lib/python3.6/site-packages/boa_test/example/BinopTest.py
|
simplitech/neoinvoice
|
bc9a0217858938b49f99fef13b3439f4a537a5f5
|
[
"MIT"
] | null | null | null |
smartcontract/venv/lib/python3.6/site-packages/boa_test/example/BinopTest.py
|
simplitech/neoinvoice
|
bc9a0217858938b49f99fef13b3439f4a537a5f5
|
[
"MIT"
] | null | null | null |
# tested
def Main(operation, a, b):
    """Apply the operator named by *operation* to a and b.

    '~' is unary and ignores b; an unrecognized operation returns 'unknown'.
    Note '/' performs true division (may return a float) while '//' floors.
    """
    if operation == '&':
        print("DOING AND!")
        return a & b
    elif operation == '|':
        return a | b
    elif operation == '^':
        return a ^ b
    elif operation == '>>':
        return a >> b
    elif operation == '<<':
        return a << b
    elif operation == '%':
        return a % b
    elif operation == '//':
        return a // b
    elif operation == '/':
        return a / b
    elif operation == '~':
        return ~a
    return 'unknown'
| 15.142857
| 27
| 0.445283
| 55
| 530
| 4.290909
| 0.236364
| 0.076271
| 0.271186
| 0.40678
| 0.741525
| 0.741525
| 0.741525
| 0.741525
| 0.741525
| 0.741525
| 0
| 0
| 0.4
| 530
| 34
| 28
| 15.588235
| 0.742138
| 0.011321
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0
| 0
| 0.52381
| 0.047619
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
b91f956759f9bffb9e5aed456b3f1c06a9a13834
| 201
|
py
|
Python
|
glacier_flow_model/__init__.py
|
munterfi/glacier-flow-model
|
fd79b4dde841c7b49a2d9da57c203bb943873d49
|
[
"MIT"
] | null | null | null |
glacier_flow_model/__init__.py
|
munterfi/glacier-flow-model
|
fd79b4dde841c7b49a2d9da57c203bb943873d49
|
[
"MIT"
] | 14
|
2021-10-02T12:25:36.000Z
|
2022-03-30T13:03:39.000Z
|
glacier_flow_model/__init__.py
|
munterfi/glacier-flow-model
|
fd79b4dde841c7b49a2d9da57c203bb943873d49
|
[
"MIT"
] | null | null | null |
"""Top-level package for glacier-flow-model."""
from glacier_flow_model.data import PkgDataAccess
from glacier_flow_model.model import GlacierFlowModel
__all__ = ["GlacierFlowModel", "PkgDataAccess"]
| 33.5
| 53
| 0.81592
| 24
| 201
| 6.5
| 0.541667
| 0.211538
| 0.307692
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084577
| 201
| 5
| 54
| 40.2
| 0.847826
| 0.20398
| 0
| 0
| 0
| 0
| 0.188312
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b953c3e0ec8c25c52583ca3bc127c8fcf94d4196
| 1,398
|
py
|
Python
|
lib/staraligner.py
|
nirmalya-broad/PatHCap_PL
|
a790c9c9423ea4eaf43b9a71573137e370ae9269
|
[
"BSD-3-Clause"
] | null | null | null |
lib/staraligner.py
|
nirmalya-broad/PatHCap_PL
|
a790c9c9423ea4eaf43b9a71573137e370ae9269
|
[
"BSD-3-Clause"
] | null | null | null |
lib/staraligner.py
|
nirmalya-broad/PatHCap_PL
|
a790c9c9423ea4eaf43b9a71573137e370ae9269
|
[
"BSD-3-Clause"
] | null | null | null |
class StarAligner:
    """Thin wrapper around the STAR aligner command line.

    Output files are written to ``outdir`` prefixed with the sample id;
    ``corenum`` is passed to STAR's --runThreadN.
    """

    def __init__(self, confd, outdir, refpath, corenum=1):
        self.confd = confd      # config object providing STAR path and ldelim
        self.outdir = outdir    # output directory
        self.refpath = refpath  # STAR genome directory
        self.corenum = corenum  # thread count for STAR

    def align_paired(self, sample_id, read1, read2):
        """Align a paired-end sample; return the path of the SAM STAR writes.

        Fixes vs. original: added the missing ``self`` parameter, joined the
        command string with explicit continuations, and wrapped the integer
        thread count in str() before concatenation.
        """
        # NOTE(review): original used a bare `call`; presumably imported from
        # subprocess at the (unseen) top of the file -- imported locally here.
        from subprocess import call
        confd = self.confd
        out_prefix = self.outdir + confd.ldelim + sample_id
        star_cmd = (confd.STAR + " --genomeDir " + self.refpath +
                    " --readFilesIn " + read1 + " " + read2 +
                    " --outFileNamePrefix " + out_prefix +
                    " --runThreadN " + str(self.corenum))
        call(star_cmd.split())
        print("Call: " + star_cmd)
        return out_prefix + "Aligned.out.sam"

    def align_single(self, sample_id, read):
        """Align a single-end sample; return the path of the SAM STAR writes."""
        from subprocess import call
        confd = self.confd
        out_prefix = self.outdir + confd.ldelim + sample_id
        star_cmd = (confd.STAR + " --genomeDir " + self.refpath +
                    " --readFilesIn " + read +
                    " --outFileNamePrefix " + out_prefix +
                    " --runThreadN " + str(self.corenum))
        call(star_cmd.split())
        print("Call: " + star_cmd)
        return out_prefix + "Aligned.out.sam"
| 31.772727
| 78
| 0.5701
| 148
| 1,398
| 5.182432
| 0.22973
| 0.070404
| 0.057366
| 0.046936
| 0.750978
| 0.750978
| 0.750978
| 0.750978
| 0.750978
| 0.750978
| 0
| 0.005314
| 0.326896
| 1,398
| 43
| 79
| 32.511628
| 0.809777
| 0
| 0
| 0.722222
| 0
| 0
| 0.120974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b982b1a8754097043114722e4c6ee7cdffdf4303
| 23,543
|
py
|
Python
|
tests/api/v3_1_1/test_endpoint.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 36
|
2021-05-18T16:24:19.000Z
|
2022-03-05T13:44:41.000Z
|
tests/api/v3_1_1/test_endpoint.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 15
|
2021-06-08T19:03:37.000Z
|
2022-02-25T14:47:33.000Z
|
tests/api/v3_1_1/test_endpoint.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 6
|
2021-06-10T09:32:01.000Z
|
2022-01-12T08:34:39.000Z
|
# -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI endpoint API fixtures and tests.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from ciscoisesdk.exceptions import ciscoisesdkException
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION
pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.1.1', reason='version does not match')
def is_valid_release_rejected_endpoint(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_258969f4f97557daacb3dadaced526cc_v3_1_1').validate(obj.response)
    return True


def release_rejected_endpoint(api):
    endpoint_result = api.endpoint.release_rejected_endpoint(
        active_validation=False,
        id='string',
        payload=None
    )
    return endpoint_result


@pytest.mark.endpoint
def test_release_rejected_endpoint(api, validator):
    try:
        assert is_valid_release_rejected_endpoint(
            validator,
            release_rejected_endpoint(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def release_rejected_endpoint_default(api):
    endpoint_result = api.endpoint.release_rejected_endpoint(
        active_validation=False,
        id='string',
        payload=None
    )
    return endpoint_result


@pytest.mark.endpoint
def test_release_rejected_endpoint_default(api, validator):
    try:
        assert is_valid_release_rejected_endpoint(
            validator,
            release_rejected_endpoint_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_deregister_endpoint(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_ed121b2686e85bd5b28c068c3c0de220_v3_1_1').validate(obj.response)
    return True


def deregister_endpoint(api):
    endpoint_result = api.endpoint.deregister_endpoint(
        active_validation=False,
        id='string',
        payload=None
    )
    return endpoint_result


@pytest.mark.endpoint
def test_deregister_endpoint(api, validator):
    try:
        assert is_valid_deregister_endpoint(
            validator,
            deregister_endpoint(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def deregister_endpoint_default(api):
    endpoint_result = api.endpoint.deregister_endpoint(
        active_validation=False,
        id='string',
        payload=None
    )
    return endpoint_result


@pytest.mark.endpoint
def test_deregister_endpoint_default(api, validator):
    try:
        assert is_valid_deregister_endpoint(
            validator,
            deregister_endpoint_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_get_rejected_endpoints(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_f8a2f0834e625822bed1cb4cf34fde5e_v3_1_1').validate(obj.response)
    return True


def get_rejected_endpoints(api):
    endpoint_result = api.endpoint.get_rejected_endpoints()
    return endpoint_result


@pytest.mark.endpoint
def test_get_rejected_endpoints(api, validator):
    try:
        assert is_valid_get_rejected_endpoints(
            validator,
            get_rejected_endpoints(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def get_rejected_endpoints_default(api):
    endpoint_result = api.endpoint.get_rejected_endpoints()
    return endpoint_result


@pytest.mark.endpoint
def test_get_rejected_endpoints_default(api, validator):
    try:
        assert is_valid_get_rejected_endpoints(
            validator,
            get_rejected_endpoints_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_get_endpoint_by_name(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_7d53842e83f0538cab91e097aa6800ce_v3_1_1').validate(obj.response)
    return True


def get_endpoint_by_name(api):
    endpoint_result = api.endpoint.get_endpoint_by_name(
        name='string'
    )
    return endpoint_result


@pytest.mark.endpoint
def test_get_endpoint_by_name(api, validator):
    try:
        assert is_valid_get_endpoint_by_name(
            validator,
            get_endpoint_by_name(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def get_endpoint_by_name_default(api):
    endpoint_result = api.endpoint.get_endpoint_by_name(
        name='string'
    )
    return endpoint_result


@pytest.mark.endpoint
def test_get_endpoint_by_name_default(api, validator):
    try:
        assert is_valid_get_endpoint_by_name(
            validator,
            get_endpoint_by_name_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_get_endpoint_by_id(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_eb8e0ce63376573995a49178435f7747_v3_1_1').validate(obj.response)
    return True


def get_endpoint_by_id(api):
    endpoint_result = api.endpoint.get_endpoint_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.endpoint
def test_get_endpoint_by_id(api, validator):
    try:
        assert is_valid_get_endpoint_by_id(
            validator,
            get_endpoint_by_id(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def get_endpoint_by_id_default(api):
    endpoint_result = api.endpoint.get_endpoint_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.endpoint
def test_get_endpoint_by_id_default(api, validator):
    try:
        assert is_valid_get_endpoint_by_id(
            validator,
            get_endpoint_by_id_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_update_endpoint_by_id(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_c8b30af4b84b5a90be2fc152cf26ad42_v3_1_1').validate(obj.response)
    return True


def update_endpoint_by_id(api):
    endpoint_result = api.endpoint.update_endpoint_by_id(
        active_validation=False,
        custom_attributes={'customAttributes': {}},
        description='string',
        group_id='string',
        id='string',
        identity_store='string',
        identity_store_id='string',
        mac='string',
        mdm_attributes={'mdmServerName': 'string', 'mdmReachable': True, 'mdmEnrolled': True, 'mdmComplianceStatus': True, 'mdmOS': 'string', 'mdmManufacturer': 'string', 'mdmModel': 'string', 'mdmSerial': 'string', 'mdmEncrypted': True, 'mdmPinlock': True, 'mdmJailBroken': True, 'mdmIMEI': 'string', 'mdmPhoneNumber': 'string'},
        name='string',
        payload=None,
        portal_user='string',
        profile_id='string',
        static_group_assignment=True,
        static_profile_assignment=True
    )
    return endpoint_result


@pytest.mark.endpoint
def test_update_endpoint_by_id(api, validator):
    try:
        assert is_valid_update_endpoint_by_id(
            validator,
            update_endpoint_by_id(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def update_endpoint_by_id_default(api):
    endpoint_result = api.endpoint.update_endpoint_by_id(
        active_validation=False,
        id='string',
        custom_attributes=None,
        description=None,
        group_id=None,
        identity_store=None,
        identity_store_id=None,
        mac=None,
        mdm_attributes=None,
        name=None,
        payload=None,
        portal_user=None,
        profile_id=None,
        static_group_assignment=None,
        static_profile_assignment=None
    )
    return endpoint_result


@pytest.mark.endpoint
def test_update_endpoint_by_id_default(api, validator):
    try:
        assert is_valid_update_endpoint_by_id(
            validator,
            update_endpoint_by_id_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_delete_endpoint_by_id(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_36658f1cac5f578ab6509196266ad8e3_v3_1_1').validate(obj.response)
    return True


def delete_endpoint_by_id(api):
    endpoint_result = api.endpoint.delete_endpoint_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.endpoint
def test_delete_endpoint_by_id(api, validator):
    try:
        assert is_valid_delete_endpoint_by_id(
            validator,
            delete_endpoint_by_id(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def delete_endpoint_by_id_default(api):
    endpoint_result = api.endpoint.delete_endpoint_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.endpoint
def test_delete_endpoint_by_id_default(api, validator):
    try:
        assert is_valid_delete_endpoint_by_id(
            validator,
            delete_endpoint_by_id_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_register_endpoint(json_schema_validate, obj):
    """Return True when *obj* is a valid response for this endpoint."""
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_dfaeea899c185169ae2a3b70b5491008_v3_1_1').validate(obj.response)
    return True


def register_endpoint(api):
    endpoint_result = api.endpoint.register_endpoint(
        active_validation=False,
        custom_attributes={'customAttributes': {}},
        description='string',
        group_id='string',
        id='string',
        identity_store='string',
        identity_store_id='string',
        mac='string',
        mdm_attributes={'mdmServerName': 'string', 'mdmReachable': True, 'mdmEnrolled': True, 'mdmComplianceStatus': True, 'mdmOS': 'string', 'mdmManufacturer': 'string', 'mdmModel': 'string', 'mdmSerial': 'string', 'mdmEncrypted': True, 'mdmPinlock': True, 'mdmJailBroken': True, 'mdmIMEI': 'string', 'mdmPhoneNumber': 'string'},
        name='string',
        payload=None,
        portal_user='string',
        profile_id='string',
        static_group_assignment=True,
        static_profile_assignment=True
    )
    return endpoint_result


@pytest.mark.endpoint
def test_register_endpoint(api, validator):
    try:
        assert is_valid_register_endpoint(
            validator,
            register_endpoint(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def register_endpoint_default(api):
    endpoint_result = api.endpoint.register_endpoint(
        active_validation=False,
        custom_attributes=None,
        description=None,
        group_id=None,
        id=None,
        identity_store=None,
        identity_store_id=None,
        mac=None,
        mdm_attributes=None,
        name=None,
        payload=None,
        portal_user=None,
        profile_id=None,
        static_group_assignment=None,
        static_profile_assignment=None
    )
    return endpoint_result


@pytest.mark.endpoint
def test_register_endpoint_default(api, validator):
    try:
        assert is_valid_register_endpoint(
            validator,
            register_endpoint_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_get_endpoints(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_719765b7f7285d71be4645db91b0fc74_v3_1_1').validate(obj.response)
return True
def get_endpoints(api):
endpoint_result = api.endpoint.get_endpoints(
filter='value1,value2',
filter_type='string',
page=0,
size=0,
sortasc='string',
sortdsc='string'
)
return endpoint_result
@pytest.mark.endpoint
def test_get_endpoints(api, validator):
try:
assert is_valid_get_endpoints(
validator,
get_endpoints(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_endpoints_default(api):
endpoint_result = api.endpoint.get_endpoints(
filter=None,
filter_type=None,
page=None,
size=None,
sortasc=None,
sortdsc=None
)
return endpoint_result
@pytest.mark.endpoint
def test_get_endpoints_default(api, validator):
try:
assert is_valid_get_endpoints(
validator,
get_endpoints_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_create_endpoint(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_845787ab88be5092bf4ba9f522e8e26f_v3_1_1').validate(obj.response)
return True
def create_endpoint(api):
endpoint_result = api.endpoint.create_endpoint(
active_validation=False,
custom_attributes={'customAttributes': {}},
description='string',
group_id='string',
identity_store='string',
identity_store_id='string',
mac='string',
mdm_attributes={'mdmServerName': 'string', 'mdmReachable': True, 'mdmEnrolled': True, 'mdmComplianceStatus': True, 'mdmOS': 'string', 'mdmManufacturer': 'string', 'mdmModel': 'string', 'mdmSerial': 'string', 'mdmEncrypted': True, 'mdmPinlock': True, 'mdmJailBroken': True, 'mdmIMEI': 'string', 'mdmPhoneNumber': 'string'},
name='string',
payload=None,
portal_user='string',
profile_id='string',
static_group_assignment=True,
static_profile_assignment=True
)
return endpoint_result
@pytest.mark.endpoint
def test_create_endpoint(api, validator):
try:
assert is_valid_create_endpoint(
validator,
create_endpoint(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def create_endpoint_default(api):
endpoint_result = api.endpoint.create_endpoint(
active_validation=False,
custom_attributes=None,
description=None,
group_id=None,
identity_store=None,
identity_store_id=None,
mac=None,
mdm_attributes=None,
name=None,
payload=None,
portal_user=None,
profile_id=None,
static_group_assignment=None,
static_profile_assignment=None
)
return endpoint_result
@pytest.mark.endpoint
def test_create_endpoint_default(api, validator):
try:
assert is_valid_create_endpoint(
validator,
create_endpoint_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_get_version(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_85adcb1d998d54838add3b4d644242af_v3_1_1').validate(obj.response)
return True
def get_version(api):
endpoint_result = api.endpoint.get_version(
)
return endpoint_result
@pytest.mark.endpoint
def test_get_version(api, validator):
try:
assert is_valid_get_version(
validator,
get_version(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_version_default(api):
endpoint_result = api.endpoint.get_version(
)
return endpoint_result
@pytest.mark.endpoint
def test_get_version_default(api, validator):
try:
assert is_valid_get_version(
validator,
get_version_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_bulk_request_for_endpoint(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_c03505504e8e5af8a715e27c40f16eab_v3_1_1').validate(obj.response)
return True
def bulk_request_for_endpoint(api):
endpoint_result = api.endpoint.bulk_request_for_endpoint(
active_validation=False,
operation_type='string',
payload=None,
resource_media_type='string'
)
return endpoint_result
@pytest.mark.endpoint
def test_bulk_request_for_endpoint(api, validator):
try:
assert is_valid_bulk_request_for_endpoint(
validator,
bulk_request_for_endpoint(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def bulk_request_for_endpoint_default(api):
endpoint_result = api.endpoint.bulk_request_for_endpoint(
active_validation=False,
operation_type=None,
payload=None,
resource_media_type=None
)
return endpoint_result
@pytest.mark.endpoint
def test_bulk_request_for_endpoint_default(api, validator):
try:
assert is_valid_bulk_request_for_endpoint(
validator,
bulk_request_for_endpoint_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_monitor_bulk_status_endpoint(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_5b054a43ba875f0da3da5a7d863f3ef7_v3_1_1').validate(obj.response)
return True
def monitor_bulk_status_endpoint(api):
endpoint_result = api.endpoint.monitor_bulk_status_endpoint(
bulkid='string'
)
return endpoint_result
@pytest.mark.endpoint
def test_monitor_bulk_status_endpoint(api, validator):
try:
assert is_valid_monitor_bulk_status_endpoint(
validator,
monitor_bulk_status_endpoint(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def monitor_bulk_status_endpoint_default(api):
endpoint_result = api.endpoint.monitor_bulk_status_endpoint(
bulkid='string'
)
return endpoint_result
@pytest.mark.endpoint
def test_monitor_bulk_status_endpoint_default(api, validator):
try:
assert is_valid_monitor_bulk_status_endpoint(
validator,
monitor_bulk_status_endpoint_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
| 29.914867
| 330
| 0.690821
| 2,671
| 23,543
| 5.804942
| 0.086484
| 0.03773
| 0.05366
| 0.033538
| 0.878684
| 0.868107
| 0.858949
| 0.851532
| 0.838697
| 0.814834
| 0
| 0.016903
| 0.226012
| 23,543
| 786
| 331
| 29.952926
| 0.833992
| 0.049017
| 0
| 0.714968
| 0
| 0
| 0.089402
| 0.024975
| 0
| 0
| 0
| 0
| 0.124204
| 1
| 0.103503
| false
| 0
| 0.007962
| 0
| 0.194268
| 0.020701
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9bdb3aba871f8ca027b570ba23681cb391e3eb8
| 99,762
|
py
|
Python
|
tests/test_created__numeric.py
|
ironmussa/Optimus
|
fbaea6e0957f0bc016280a85ff021904faac20c5
|
[
"Apache-2.0"
] | 1,045
|
2017-07-17T17:59:46.000Z
|
2021-06-15T07:06:48.000Z
|
tests/test_created__numeric.py
|
ironmussa/Optimus
|
fbaea6e0957f0bc016280a85ff021904faac20c5
|
[
"Apache-2.0"
] | 955
|
2017-07-14T15:47:58.000Z
|
2021-05-27T14:16:24.000Z
|
tests/test_created__numeric.py
|
ironmussa/Optimus
|
fbaea6e0957f0bc016280a85ff021904faac20c5
|
[
"Apache-2.0"
] | 226
|
2017-08-04T20:41:33.000Z
|
2021-05-21T08:28:33.000Z
|
import datetime
import numpy as np
from optimus.tests.base import TestBase
from optimus.helpers.json import json_encoding
from optimus.helpers.functions import deep_sort, df_dicts_equal, results_equal
def Timestamp(t):
return datetime.datetime.strptime(t, "%Y-%m-%d %H:%M:%S")
NaT = np.datetime64('NaT')
nan = float("nan")
inf = float("inf")
class TestNumericPandas(TestBase):
config = {'engine': 'pandas'}
dict = {('NullType', 'object'): [None, None, None, None, None, None], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}
maxDiff = None
def test_cols_abs(self):
df = self.create_dataframe(data={('abs_test', 'object'): [-1, '10', -inf, nan, 0, None]}, force_data_types=True)
result = df.cols.abs(cols=['abs_test'])
expected = self.create_dataframe(data={('abs_test', 'float64'): [1.0, 10.0, inf, nan, 0.0, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_abs_all(self):
df = self.df.copy()
result = df.cols.abs(cols='*')
expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [10.0, 7.0, 7.0, 8.0, 10.0, 8.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.4734656e+18, 1.4391648e+18, 1.403568e+18, 1.372032e+18, 1.336608e+18, 1.3023936e+18], ('age', 'float64'): [5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_abs_multiple(self):
df = self.df.copy()
result = df.cols.abs(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 
4.0, 1.8, 5.7, nan], ('wt', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_abs_numeric(self):
df = self.df.copy().cols.select(['height(ft)'])
result = df.cols.abs(cols=['height(ft)'])
expected = self.create_dataframe(data={('height(ft)', 'float64'): [28.0, 17.0, 26.0, 13.0, nan, 300.0]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_abs_string(self):
df = self.df.copy().cols.select(['names'])
result = df.cols.abs(cols=['names'], output_cols=['names_2'])
expected = self.create_dataframe(data={('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ceil(self):
df = self.create_dataframe(data={('ceil_test', 'object'): [inf, '12.342', 0, None, 1004.5, -27.7]}, force_data_types=True)
result = df.cols.ceil(cols=['ceil_test'])
expected = self.create_dataframe(data={('ceil_test', 'float64'): [inf, 13.0, 0.0, nan, 1005.0, -27.0]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ceil_all(self):
df = self.df.copy()
result = df.cols.ceil(cols='*')
expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [10.0, 7.0, 7.0, 8.0, 10.0, 8.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.4734656e+18, 1.4391648e+18, 1.403568e+18, 1.372032e+18, 1.336608e+18, 1.3023936e+18], ('age', 'float64'): [5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18], ('weight(t)', 'float64'): [5.0, 2.0, 4.0, 2.0, 6.0, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ceil_multiple(self):
df = self.df.copy()
result = df.cols.ceil(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 
4.0, 1.8, 5.7, nan], ('wt', 'float64'): [5.0, 2.0, 4.0, 2.0, 6.0, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ceil_numeric(self):
df = self.df.copy().cols.select(['height(ft)'])
result = df.cols.ceil(cols=['height(ft)'])
expected = self.create_dataframe(data={('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ceil_string(self):
df = self.df.copy().cols.select(['names'])
result = df.cols.ceil(cols=['names'], output_cols=['names_2'])
expected = self.create_dataframe(data={('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_exp(self):
df = self.create_dataframe(data={('exp_test', 'object'): [0, '0.5', -0.5, 2.718, inf, None]}, force_data_types=True)
result = df.cols.exp(cols=['exp_test'])
expected = self.create_dataframe(data={('exp_test', 'float64'): [1.0, 1.6487212707001282, 0.6065306597126334, 15.149991940878165, inf, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_exp_all(self):
df = self.df.copy()
result = df.cols.exp(cols='*')
expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [6.914400106940203e-13, 24154952.7535753, 195729609428.83878, 442413.3920089205, nan, 1.9424263952412558e+130], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [22026.465794806718, 1096.6331584284585, 1096.6331584284585, 2980.9579870417283, 22026.465794806718, 2980.9579870417283], ('Cybertronian', 'float64'): [2.718281828459045, 2.718281828459045, 2.718281828459045, 2.718281828459045, 2.718281828459045, 1.0], ('Date Type', 'float64'): [inf, inf, inf, inf, inf, inf], ('age', 'float64'): [inf, inf, inf, inf, inf, inf], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [inf, inf, inf, inf, inf, inf], ('weight(t)', 'float64'): [73.69979369959579, 7.38905609893065, 54.598150033144236, 6.0496474644129465, 298.8674009670603, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_exp_multiple(self):
df = self.df.copy()
result = df.cols.exp(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 
4.0, 1.8, 5.7, nan], ('wt', 'float64'): [73.69979369959579, 7.38905609893065, 54.598150033144236, 6.0496474644129465, 298.8674009670603, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_exp_numeric(self):
df = self.df.copy().cols.select(['height(ft)'])
result = df.cols.exp(cols=['height(ft)'])
expected = self.create_dataframe(data={('height(ft)', 'float64'): [6.914400106940203e-13, 24154952.7535753, 195729609428.83878, 442413.3920089205, nan, 1.9424263952412558e+130]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_exp_string(self):
df = self.df.copy().cols.select(['names'])
result = df.cols.exp(cols=['names'], output_cols=['names_2'])
expected = self.create_dataframe(data={('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_floor(self):
df = self.create_dataframe(data={('floor_test', 'object'): [inf, '12.342', 0, None, 1004.5, -27.7]}, force_data_types=True)
result = df.cols.floor(cols=['floor_test'])
expected = self.create_dataframe(data={('floor_test', 'float64'): [inf, 12.0, 0.0, nan, 1004.0, -28.0]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_floor_all(self):
df = self.df.copy()
result = df.cols.floor(cols='*')
expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [10.0, 7.0, 7.0, 8.0, 10.0, 8.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.4734656e+18, 1.4391648e+18, 1.403568e+18, 1.372032e+18, 1.336608e+18, 1.3023936e+18], ('age', 'float64'): [5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18], ('weight(t)', 'float64'): [4.0, 2.0, 4.0, 1.0, 5.0, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_floor_multiple(self):
df = self.df.copy()
result = df.cols.floor(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 
4.0, 1.8, 5.7, nan], ('wt', 'float64'): [4.0, 2.0, 4.0, 1.0, 5.0, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_floor_numeric(self):
df = self.df.copy().cols.select(['height(ft)'])
result = df.cols.floor(cols=['height(ft)'])
expected = self.create_dataframe(data={('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_floor_string(self):
df = self.df.copy().cols.select(['names'])
result = df.cols.floor(cols=['names'], output_cols=['names_2'])
expected = self.create_dataframe(data={('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ln(self):
df = self.create_dataframe(data={('ln_test', 'object'): ['0.36', 1, inf, 0, 2.7182, -100]}, force_data_types=True)
result = df.cols.ln(cols=['ln_test'])
expected = self.create_dataframe(data={('ln_test', 'float64'): [-1.0216512475319814, 0.0, inf, -inf, 0.9999698965391098, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ln_all(self):
df = self.df.copy()
result = df.cols.ln(cols='*')
expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [nan, 2.833213344056216, 3.258096538021482, 2.5649493574615367, nan, 5.703782474656201], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [2.302585092994046, 1.9459101490553132, 1.9459101490553132, 2.0794415416798357, 2.302585092994046, 2.0794415416798357], ('Cybertronian', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, -inf], ('Date Type', 'float64'): [41.83414885104055, 41.810594619215664, 41.78554923984176, 41.76282452653968, 41.73666673530567, 41.71073547614197], ('age', 'float64'): [15.424948470398375, 15.424948470398375, 15.424948470398375, 15.424948470398375, 15.424948470398375, 15.424948470398375], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [41.78554923984176, 41.78554923984176, 41.78554923984176, 41.78554923984176, 41.78554923984176, 41.78554923984176], ('weight(t)', 'float64'): [1.4586150226995167, 0.6931471805599453, 1.3862943611198906, 0.5877866649021191, 1.7404661748405046, nan]}, force_data_types=True)
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ln_multiple(self):
    """ln on selected columns written to new output columns; original columns are untouched."""
    df = self.df.copy()
    result = df.cols.ln(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    # Expected: every source column preserved verbatim, plus 'nt'/'wt'/'jn' holding the ln results.
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [1.4586150226995167, 0.6931471805599453, 1.3862943611198906, 0.5877866649021191, 1.7404661748405046, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_ln_numeric(self):
    """ln on a single numeric column; negative and missing inputs map to nan."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.ln(cols=['height(ft)'])
    expected_values = [nan, 2.833213344056216, 3.258096538021482, 2.5649493574615367, nan, 5.703782474656201]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_ln_string(self):
    """ln on a string column into an output column: all results are nan, source is kept."""
    source = self.df.copy().cols.select(['names'])
    actual = source.cols.ln(cols=['names'], output_cols=['names_2'])
    expected_data = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=expected_data, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_log(self):
    """Base-10 log over edge values: inf stays inf, negative -> nan, 0 -> -inf."""
    source = self.create_dataframe(
        data={('log_test', 'float64'): [10.0, nan, inf, -356.0, 0.5314, 0.0]},
        force_data_types=True)
    actual = source.cols.log(cols=['log_test'], base=10)
    expected = self.create_dataframe(
        data={('log_test', 'float64'): [1.0, nan, inf, nan, -0.2745784499257413, -inf]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_log_1(self):
    """log with a non-integer base (100.3) on a numeric column."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.log(cols=['height(ft)'], base=100.3)
    expected_values = [nan, 0.6148245379175669, 0.7070267767453151, 0.5566096202361945, nan, 1.2377555088534953]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_log_2(self):
    """log with base ~e (2.7182), values close to but not identical to ln."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.log(cols=['height(ft)'], base=2.7182)
    expected_values = [nan, 2.8332986361508996, 3.258194620955827, 2.5650265736386784, nan, 5.70395418341788]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_log_3(self):
    """log with a negative base yields nan everywhere."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.log(cols=['height(ft)'], base=-3)
    expected = self.create_dataframe(
        data={('height(ft)', 'float64'): [nan, nan, nan, nan, nan, nan]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_log_all(self):
    """Base-12 log over every column ('*'); non-numeric columns become all-nan float64."""
    df = self.df.copy()
    result = df.cols.log(cols='*', base=12)
    # Expected: log base 12 per value; True->0.0, False->-inf; datetimes treated as numbers.
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [nan, 1.1401689251779061, 1.3111545008338428, 1.032211555182713, nan, 2.2953709247559937], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [0.9266284080291269, 0.783091851446946, 0.783091851446946, 0.8368288369533894, 0.9266284080291269, 0.8368288369533894], ('Cybertronian', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, -inf], ('Date Type', 'float64'): [16.835299971775452, 16.825821051580643, 16.81574204946761, 16.806596952083762, 16.796070282505955, 16.785634776139585], ('age', 'float64'): [6.207455910552758, 6.207455910552758, 6.207455910552758, 6.207455910552758, 6.207455910552758, 6.207455910552758], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [16.81574204946761, 16.81574204946761, 16.81574204946761, 16.81574204946761, 16.81574204946761, 16.81574204946761], ('weight(t)', 'float64'): [0.5869898665303819, 0.2789429456511298, 0.5578858913022596, 0.23654275501748367, 0.7004151141810467, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_log_multiple(self):
    """Base-21 log on selected columns written to new output columns; sources untouched."""
    df = self.df.copy()
    result = df.cols.log(cols=['NullType', 'weight(t)', 'japanese name'], base=21, output_cols=['nt', 'wt', 'jn'])
    # Expected: every source column preserved verbatim, plus 'nt'/'wt'/'jn' holding the log results.
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [0.4790948506821362, 0.227670248696953, 0.455340497393906, 0.1930636666096123, 0.5716713246304594, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_log_numeric(self):
    """log with default base on a numeric column (values match base-10 log)."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.log(cols=['height(ft)'])
    expected_values = [nan, 1.2304489213782739, 1.414973347970818, 1.1139433523068367, nan, 2.477121254719662]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_log_string(self):
    """log on a string column into an output column: all results are nan, source is kept."""
    source = self.df.copy().cols.select(['names'])
    actual = source.cols.log(cols=['names'], base=2, output_cols=['names_2'])
    expected_data = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=expected_data, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_max_abs_scaler_all(self):
    """max-abs scaling over every column ('*'); each numeric column is divided by its max absolute value."""
    df = self.df.copy()
    result = df.cols.max_abs_scaler(cols='*')
    # Expected: values in [-1, 1] per column; non-numeric columns become all-nan float64.
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-0.09333333333333334, 0.056666666666666664, 0.08666666666666667, 0.043333333333333335, nan, 1.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [1.0, 0.7, 0.7, 0.8, 1.0, 0.8], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.0, 0.9767210038700598, 0.9525624486923889, 0.9311598451976076, 0.9071185645596341, 0.8838982056995426], ('age', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 1.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 1.0], ('weight(t)', 'float64'): [0.7543859649122806, 0.3508771929824561, 0.7017543859649122, 0.3157894736842105, 1.0, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_max_abs_scaler_multiple(self):
    """max-abs scaling of selected columns into new output columns; sources untouched."""
    df = self.df.copy()
    result = df.cols.max_abs_scaler(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    # Expected: every source column preserved verbatim, plus 'nt'/'wt'/'jn' with the scaled values.
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [0.7543859649122806, 0.3508771929824561, 0.7017543859649122, 0.3157894736842105, 1.0, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_max_abs_scaler_numeric(self):
    """max-abs scaling of one numeric column; max |value| (300) maps to 1.0."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.max_abs_scaler(cols=['height(ft)'])
    expected_values = [-0.09333333333333334, 0.056666666666666664, 0.08666666666666667, 0.043333333333333335, nan, 1.0]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_max_abs_scaler_string(self):
    """max-abs scaling of a string column into an output column: all results are nan."""
    source = self.df.copy().cols.select(['names'])
    actual = source.cols.max_abs_scaler(cols=['names'], output_cols=['names_2'])
    expected_data = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=expected_data, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_min_max_scaler_all(self):
    """min-max scaling over every column ('*'); each numeric column is mapped onto [0, 1]."""
    df = self.df.copy()
    result = df.cols.min_max_scaler(cols='*')
    # Expected: per-column (x - min) / (max - min); non-numeric columns become all-nan float64.
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [0.0, 0.13719512195121952, 0.16463414634146342, 0.125, nan, 1.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [1.0, 0.0, 0.0, 0.3333333333333335, 1.0, 0.3333333333333335], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.0000000000000009, 0.7994949494949504, 0.5914141414141421, 0.4070707070707078, 0.20000000000000018, 0.0], ('age', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('weight(t)', 'float64'): [0.641025641025641, 0.051282051282051266, 0.5641025641025641, 0.0, 1.0, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_min_max_scaler_multiple(self):
    """min-max scaling of selected columns into new output columns; sources untouched."""
    df = self.df.copy()
    result = df.cols.min_max_scaler(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    # Expected: every source column preserved verbatim, plus 'nt'/'wt'/'jn' with the scaled values.
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [0.641025641025641, 0.051282051282051266, 0.5641025641025641, 0.0, 1.0, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_min_max_scaler_numeric(self):
    """min-max scaling of one numeric column: min maps to 0.0, max to 1.0, nan stays nan."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.min_max_scaler(cols=['height(ft)'])
    expected_values = [0.0, 0.13719512195121952, 0.16463414634146342, 0.125, nan, 1.0]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_min_max_scaler_string(self):
    """min-max scaling of a string column into an output column: all results are nan."""
    source = self.df.copy().cols.select(['names'])
    actual = source.cols.min_max_scaler(cols=['names'], output_cols=['names_2'])
    expected_data = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=expected_data, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_mod(self):
    """mod with divisor=3 on edge values; inf % 3 is nan, sign follows the divisor."""
    source = self.create_dataframe(
        data={('mod_test', 'float64'): [10.0, nan, inf, -356.0, 0.5314, 0.0]},
        force_data_types=True)
    actual = source.cols.mod(cols=['mod_test'], divisor=3)
    expected = self.create_dataframe(
        data={('mod_test', 'float64'): [1.0, nan, nan, 1.0, 0.5314, 0.0]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_mod_1(self):
    """mod with a non-integer divisor (100.3)."""
    source = self.create_dataframe(
        data={('mod_test', 'float64'): [10.0, nan, inf, -356.0, 0.5314, 0.0]},
        force_data_types=True)
    actual = source.cols.mod(cols=['mod_test'], divisor=100.3)
    expected = self.create_dataframe(
        data={('mod_test', 'float64'): [10.0, nan, nan, 45.19999999999999, 0.5314, 0.0]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_mod_2(self):
    """mod with divisor=6."""
    source = self.create_dataframe(
        data={('mod_test', 'float64'): [10.0, nan, inf, -356.0, 0.5314, 0.0]},
        force_data_types=True)
    actual = source.cols.mod(cols=['mod_test'], divisor=6)
    expected = self.create_dataframe(
        data={('mod_test', 'float64'): [4.0, nan, nan, 4.0, 0.5314, 0.0]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_mod_3(self):
    """mod with a negative divisor (-12); results carry the divisor's sign."""
    source = self.create_dataframe(
        data={('mod_test', 'float64'): [10.0, nan, inf, -356.0, 0.5314, 0.0]},
        force_data_types=True)
    actual = source.cols.mod(cols=['mod_test'], divisor=-12)
    expected = self.create_dataframe(
        data={('mod_test', 'float64'): [-2.0, nan, nan, -8.0, -11.4686, -0.0]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_mod_all(self):
    """mod with divisor=5 over every column ('*'); non-numeric columns become all-nan float64."""
    df = self.df.copy()
    result = df.cols.mod(cols='*', divisor=5)
    # Expected: value % 5 per element; booleans are treated as 1/0.
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [2.0, 2.0, 1.0, 3.0, nan, 0.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [0.0, 2.0, 2.0, 3.0, 0.0, 3.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('age', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 0.7000000000000002, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_mod_multiple(self):
    """mod with divisor=10 on selected columns into new output columns; sources untouched."""
    df = self.df.copy()
    result = df.cols.mod(cols=['NullType', 'weight(t)', 'japanese name'], divisor=10, output_cols=['nt', 'wt', 'jn'])
    # Expected: every source column preserved verbatim, plus 'nt'/'wt'/'jn' with the mod results
    # (all weight values are < 10, so 'wt' equals 'weight(t)').
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_mod_numeric(self):
    """mod with the default divisor on a numeric column."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.mod(cols=['height(ft)'])
    expected = self.create_dataframe(
        data={('height(ft)', 'float64'): [0.0, 1.0, 0.0, 1.0, nan, 0.0]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_mod_string(self):
    """mod on a string column into an output column: all results are nan, source is kept."""
    source = self.df.copy().cols.select(['names'])
    actual = source.cols.mod(cols=['names'], divisor=4, output_cols=['names_2'])
    expected_data = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=expected_data, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_modified_z_score_all(self):
    """modified z-score (estimate=False) over every column ('*')."""
    df = self.df.copy()
    result = df.cols.modified_z_score(cols='*', estimate=False)
    # Expected: constant columns (zero MAD) yield nan (or inf for the one differing bool);
    # non-numeric columns become all-nan float64.
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [3.3725, 0.0, 0.6745, 0.29977777777777775, nan, 21.20927777777778], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [1.349, 0.6745, 0.6745, 0.0, 1.349, 0.0], ('Cybertronian', 'float64'): [nan, nan, nan, nan, nan, inf], ('Date Type', 'float64'): [1.1268184498736311, 0.6756364785172704, 0.2074073294018534, 0.2074073294018534, 0.6733635214827296, 1.1234090143218196], ('age', 'float64'): [nan, nan, nan, nan, nan, nan], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [nan, nan, nan, nan, nan, nan], ('weight(t)', 'float64'): [0.11902941176470579, 0.7935294117647058, 0.0, 0.8728823529411763, 0.6745, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_modified_z_score_multiple(self):
    """modified z-score of selected columns into new output columns; sources untouched."""
    df = self.df.copy()
    result = df.cols.modified_z_score(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'], estimate=False)
    # Expected: every source column preserved verbatim, plus 'nt'/'wt'/'jn' with the scores.
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [0.11902941176470579, 0.7935294117647058, 0.0, 0.8728823529411763, 0.6745, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_modified_z_score_numeric(self):
    """modified z-score (estimate=False) on a single numeric column; the median value scores 0."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.modified_z_score(cols=['height(ft)'], estimate=False)
    expected_values = [3.3725, 0.0, 0.6745, 0.29977777777777775, nan, 21.20927777777778]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_modified_z_score_string(self):
    """modified z-score on a string column into an output column: all results are nan."""
    source = self.df.copy().cols.select(['names'])
    actual = source.cols.modified_z_score(cols=['names'], output_cols=['names_2'], estimate=False)
    expected_data = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=expected_data, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_pow(self):
    """pow with power=2 on edge values; (-inf)**2 is +inf."""
    source = self.create_dataframe(
        data={('pow_test', 'float64'): [10.0, nan, -inf, -356.0, 0.5314, 0.0]},
        force_data_types=True)
    actual = source.cols.pow(cols=['pow_test'], power=2)
    expected = self.create_dataframe(
        data={('pow_test', 'float64'): [100.0, nan, inf, 126736.0, 0.28238596, 0.0]},
        force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_pow_1(self):
    """pow with power=0.5 (square root); negative and missing inputs map to nan."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.pow(cols=['height(ft)'], power=0.5)
    expected_values = [nan, 4.123105625617661, 5.0990195135927845, 3.605551275463989, nan, 17.320508075688775]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_pow_2(self):
    """pow with a large integer power (10); a negative base raised to an even power is positive."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.pow(cols=['height(ft)'], power=10)
    expected_values = [296196766695424.0, 2015993900449.0, 141167095653376.0, 137858491849.0, nan, 5.9049e+24]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_pow_3(self):
    """pow with a negative power (-5); negative bases keep their sign for this odd exponent."""
    source = self.df.copy().cols.select(['height(ft)'])
    actual = source.cols.pow(cols=['height(ft)'], power=-5)
    expected_values = [-5.8104510025584576e-08, 7.042962777237426e-07, 8.416533573215762e-08, 2.693290743429044e-06, nan, 4.1152263374485594e-13]
    expected = self.create_dataframe(data={('height(ft)', 'float64'): expected_values}, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_pow_all(self):
    """pow with power=3 over every column ('*'); non-numeric columns become all-nan float64."""
    df = self.df.copy()
    result = df.cols.pow(cols='*', power=3)
    # Expected: each value cubed; booleans become 1.0/0.0; datetimes treated as numbers.
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-21952.0, 4913.0, 17576.0, 2197.0, nan, 27000000.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [1000.0, 343.0, 343.0, 512.0, 1000.0, 512.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [3.199042452533802e+54, 2.9807914007124516e+54, 2.7650333540436665e+54, 2.582811561078817e+54, 2.3878781881420677e+54, 2.2091579100654466e+54], ('age', 'float64'): [1.25e+20, 1.25e+20, 1.25e+20, 1.25e+20, 1.25e+20, 1.25e+20], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [2.7650333540436665e+54, 2.7650333540436665e+54, 2.7650333540436665e+54, 2.7650333540436665e+54, 2.7650333540436665e+54, 2.7650333540436665e+54], ('weight(t)', 'float64'): [79.50699999999999, 8.0, 64.0, 5.832000000000001, 185.193, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_pow_multiple(self):
    """pow on three named columns with power=117 written to output_cols; snapshot keeps originals untouched and adds 'nt'/'wt'/'jn'."""
    df = self.df.copy()
    result = df.cols.pow(cols=['NullType', 'weight(t)', 'japanese name'], power=117, output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [1.3055974789624274e+74, 1.661534994731145e+35, 2.7606985387162255e+70, 7.360089527435959e+29, 2.73752512412485e+88, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_pow_numeric(self):
    """pow with the default power on a numeric column squares each value."""
    frame = self.df.copy().cols.select(['height(ft)'])
    squared = frame.cols.pow(cols=['height(ft)'])
    expected = self.create_dataframe(
        data={('height(ft)', 'float64'): [784.0, 289.0, 676.0, 169.0, nan, 90000.0]},
        force_data_types=True,
    )
    self.assertTrue(squared.equals(expected, decimal=True, assertion=True))
def test_cols_pow_string(self):
    """pow on a string column yields nan in the output column; the source column is preserved."""
    frame = self.df.copy().cols.select(['names'])
    actual = frame.cols.pow(cols=['names'], power=3.7, output_cols=['names_2'])
    snapshot = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_reciprocal(self):
    """reciprocal over mixed values (ints, 0, -inf, numeric string, None, float)."""
    source = self.create_dataframe(
        data={('reciprocal_test', 'object'): [1, 0, -inf, '237', None, 0.125]},
        force_data_types=True,
    )
    actual = source.cols.reciprocal(cols=['reciprocal_test'])
    expected = self.create_dataframe(
        data={('reciprocal_test', 'float64'): [1.0, inf, -0.0, 0.004219409282700422, nan, 8.0]},
        force_data_types=True,
    )
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_reciprocal_all(self):
    """reciprocal over every column ('*'): snapshot shows 1/x for numeric-like columns, inf for 0/False, nan for non-numeric."""
    df = self.df.copy()
    result = df.cols.reciprocal(cols='*')
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-0.03571428571428571, 0.058823529411764705, 0.038461538461538464, 0.07692307692307693, nan, 0.0033333333333333335], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [0.1, 0.14285714285714285, 0.14285714285714285, 0.125, 0.1, 0.125], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, inf], ('Date Type', 'float64'): [6.786721047305075e-19, 6.948474559689064e-19, 7.12469933768795e-19, 7.28845974437914e-19, 7.481625128683952e-19, 7.678170408699797e-19], ('age', 'float64'): [2e-07, 2e-07, 2e-07, 2e-07, 2e-07, 2e-07], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [7.12469933768795e-19, 7.12469933768795e-19, 7.12469933768795e-19, 7.12469933768795e-19, 7.12469933768795e-19, 7.12469933768795e-19], ('weight(t)', 'float64'): [0.23255813953488372, 0.5, 0.25, 0.5555555555555556, 0.17543859649122806, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_reciprocal_multiple(self):
    """reciprocal on three named columns into output_cols 'nt'/'wt'/'jn'; snapshot keeps the source columns untouched."""
    df = self.df.copy()
    result = df.cols.reciprocal(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [0.23255813953488372, 0.5, 0.25, 0.5555555555555556, 0.17543859649122806, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_reciprocal_numeric(self):
    """reciprocal of a numeric column, compared against the stored snapshot."""
    frame = self.df.copy().cols.select(['height(ft)'])
    actual = frame.cols.reciprocal(cols=['height(ft)'])
    snapshot = {('height(ft)', 'float64'): [-0.03571428571428571, 0.058823529411764705, 0.038461538461538464, 0.07692307692307693, nan, 0.0033333333333333335]}
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_reciprocal_string(self):
    """reciprocal of a string column writes nan into the output column."""
    frame = self.df.copy().cols.select(['names'])
    actual = frame.cols.reciprocal(cols=['names'], output_cols=['names_2'])
    snapshot = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_round(self):
    """round to 2 decimals; nan and -inf pass through unchanged."""
    source = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.312312, 0.5314, 1.000009]},
        force_data_types=True,
    )
    rounded = source.cols.round(cols=['round_test'], decimals=2)
    expected = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.31, 0.53, 1.0]},
        force_data_types=True,
    )
    self.assertTrue(rounded.equals(expected, decimal=True, assertion=True))
def test_cols_round_1(self):
    """round to 1 decimal place."""
    source = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.312312, 0.5314, 1.000009]},
        force_data_types=True,
    )
    rounded = source.cols.round(cols=['round_test'], decimals=1)
    expected = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.3, 0.5, 1.0]},
        force_data_types=True,
    )
    self.assertTrue(rounded.equals(expected, decimal=True, assertion=True))
def test_cols_round_2(self):
    """round to 2 decimal places (same fixture as test_cols_round)."""
    source = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.312312, 0.5314, 1.000009]},
        force_data_types=True,
    )
    rounded = source.cols.round(cols=['round_test'], decimals=2)
    expected = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.31, 0.53, 1.0]},
        force_data_types=True,
    )
    self.assertTrue(rounded.equals(expected, decimal=True, assertion=True))
def test_cols_round_3(self):
    """round to 5 decimal places."""
    source = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.312312, 0.5314, 1.000009]},
        force_data_types=True,
    )
    rounded = source.cols.round(cols=['round_test'], decimals=5)
    expected = self.create_dataframe(
        data={('round_test', 'float64'): [10.0, nan, -inf, -356.31231, 0.5314, 1.00001]},
        force_data_types=True,
    )
    self.assertTrue(rounded.equals(expected, decimal=True, assertion=True))
def test_cols_round_all(self):
    """round over every column ('*') with decimals=4: snapshot shows numeric-like columns coerced to float64 and non-numeric columns as nan."""
    df = self.df.copy()
    result = df.cols.round(cols='*', decimals=4)
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [10.0, 7.0, 7.0, 8.0, 10.0, 8.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.4734656e+18, 1.4391648e+18, 1.403568e+18, 1.372032e+18, 1.336608e+18, 1.3023936e+18], ('age', 'float64'): [5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_round_multiple(self):
    """round on three named columns with decimals=21 into output_cols; snapshot shows values unchanged at that precision."""
    df = self.df.copy()
    result = df.cols.round(cols=['NullType', 'weight(t)', 'japanese name'], decimals=21, output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_round_numeric(self):
    """round with default decimals on a numeric column."""
    frame = self.df.copy().cols.select(['height(ft)'])
    actual = frame.cols.round(cols=['height(ft)'])
    expected = self.create_dataframe(
        data={('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0]},
        force_data_types=True,
    )
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_round_string(self):
    """round on a string column writes nan into the output column."""
    frame = self.df.copy().cols.select(['names'])
    actual = frame.cols.round(cols=['names'], decimals=5, output_cols=['names_2'])
    snapshot = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_sqrt(self):
    """sqrt over mixed values; negatives become nan, inf stays inf."""
    source = self.create_dataframe(
        data={('sqrt_test', 'object'): ['10000', 0.25, -81, inf, 0, 1]},
        force_data_types=True,
    )
    actual = source.cols.sqrt(cols=['sqrt_test'])
    expected = self.create_dataframe(
        data={('sqrt_test', 'float64'): [100.0, 0.5, nan, inf, 0.0, 1.0]},
        force_data_types=True,
    )
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_sqrt_all(self):
    """sqrt over every column ('*'): snapshot shows square roots for numeric-like columns (nan for the negative height) and nan for non-numeric."""
    df = self.df.copy()
    result = df.cols.sqrt(cols='*')
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [nan, 4.123105625617661, 5.0990195135927845, 3.605551275463989, nan, 17.320508075688775], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [3.1622776601683795, 2.6457513110645907, 2.6457513110645907, 2.8284271247461903, 3.1622776601683795, 2.8284271247461903], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1213863913.2950613, 1199651949.5253613, 1184722752.3771122, 1171337696.8235931, 1156117641.0729144, 1141224605.4129748], ('age', 'float64'): [2236.06797749979, 2236.06797749979, 2236.06797749979, 2236.06797749979, 2236.06797749979, 2236.06797749979], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1184722752.3771122, 1184722752.3771122, 1184722752.3771122, 1184722752.3771122, 1184722752.3771122, 1184722752.3771122], ('weight(t)', 'float64'): [2.073644135332772, 1.4142135623730951, 2.0, 1.3416407864998738, 2.3874672772626644, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_sqrt_multiple(self):
    """sqrt on three named columns into output_cols 'nt'/'wt'/'jn'; snapshot keeps the source columns untouched."""
    df = self.df.copy()
    result = df.cols.sqrt(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [2.073644135332772, 1.4142135623730951, 2.0, 1.3416407864998738, 2.3874672772626644, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_sqrt_numeric(self):
    """sqrt of a numeric column; the negative entry maps to nan."""
    frame = self.df.copy().cols.select(['height(ft)'])
    actual = frame.cols.sqrt(cols=['height(ft)'])
    snapshot = {('height(ft)', 'float64'): [nan, 4.123105625617661, 5.0990195135927845, 3.605551275463989, nan, 17.320508075688775]}
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_sqrt_string(self):
    """sqrt on a string column writes nan into the output column."""
    frame = self.df.copy().cols.select(['names'])
    actual = frame.cols.sqrt(cols=['names'], output_cols=['names_2'])
    snapshot = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_standard_scaler_all(self):
    """standard_scaler over every column ('*'): snapshot of scaled values for numeric-like columns, nan for non-numeric."""
    df = self.df.copy()
    result = df.cols.standard_scaler(cols='*')
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-0.7888071163227179, -0.4095729257829497, -0.333726087674996, -0.44328263160870685, nan, 1.9753887613893708], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [1.3363062095621216, -1.069044967649698, -1.069044967649698, -0.2672612419124249, 1.3363062095621216, -0.2672612419124249], ('Cybertronian', 'float64'): [0.4472135954999578, 0.4472135954999578, 0.4472135954999578, 0.4472135954999578, 0.4472135954999578, -2.23606797749979], ('Date Type', 'float64'): [1.4683126020200388, 0.879900654171497, 0.26925651685764507, -0.2717267601315684, -0.8794066055167122, -1.4663364074009], ('age', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], ('weight(t)', 'float64'): [0.5022984399896845, -1.0588994140323083, 0.2986639372911638, -1.1946557491646554, 1.4525927859161152, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_standard_scaler_multiple(self):
    """standard_scaler on three named columns into output_cols 'nt'/'wt'/'jn'; snapshot keeps the source columns untouched."""
    df = self.df.copy()
    result = df.cols.standard_scaler(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [0.5022984399896845, -1.0588994140323083, 0.2986639372911638, -1.1946557491646554, 1.4525927859161152, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_standard_scaler_numeric(self):
    """standard_scaler on a numeric column, compared against the stored snapshot."""
    frame = self.df.copy().cols.select(['height(ft)'])
    actual = frame.cols.standard_scaler(cols=['height(ft)'])
    snapshot = {('height(ft)', 'float64'): [-0.7888071163227179, -0.4095729257829497, -0.333726087674996, -0.44328263160870685, nan, 1.9753887613893708]}
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_standard_scaler_string(self):
    """standard_scaler on a string column writes nan into the output column."""
    frame = self.df.copy().cols.select(['names'])
    actual = frame.cols.standard_scaler(cols=['names'], output_cols=['names_2'])
    snapshot = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_to_float(self):
    """to_float over mixed values; None becomes nan, numeric strings are parsed."""
    source = self.create_dataframe(
        data={('to_float_test', 'object'): [-inf, 10001, 0, None, '-41', 5]},
        force_data_types=True,
    )
    actual = source.cols.to_float(cols=['to_float_test'])
    expected = self.create_dataframe(
        data={('to_float_test', 'float64'): [-inf, 10001.0, 0.0, nan, -41.0, 5.0]},
        force_data_types=True,
    )
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_to_float_all(self):
    """to_float over every column ('*'): snapshot shows numeric-like columns as float64 and non-numeric as nan."""
    df = self.df.copy()
    result = df.cols.to_float(cols='*')
    expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [10.0, 7.0, 7.0, 8.0, 10.0, 8.0], ('Cybertronian', 'float64'): [1.0, 1.0, 1.0, 1.0, 1.0, 0.0], ('Date Type', 'float64'): [1.4734656e+18, 1.4391648e+18, 1.403568e+18, 1.372032e+18, 1.336608e+18, 1.3023936e+18], ('age', 'float64'): [5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0, 5000000.0], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18, 1.403568e+18], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_to_float_multiple(self):
    """to_float on three named columns into output_cols 'nt'/'wt'/'jn'; snapshot keeps the source columns untouched."""
    df = self.df.copy()
    result = df.cols.to_float(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_to_float_numeric(self):
    """to_float on an already-numeric column is a pass-through to float64."""
    frame = self.df.copy().cols.select(['height(ft)'])
    actual = frame.cols.to_float(cols=['height(ft)'])
    expected = self.create_dataframe(
        data={('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0]},
        force_data_types=True,
    )
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_to_float_string(self):
    """to_float on a non-numeric string column writes nan into the output column."""
    frame = self.df.copy().cols.select(['names'])
    actual = frame.cols.to_float(cols=['names'], output_cols=['names_2'])
    snapshot = {
        ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
        ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
    }
    expected = self.create_dataframe(data=snapshot, force_data_types=True)
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_to_integer(self):
    """to_integer over mixed values; per the snapshot, inf and None map to 0 and floats truncate toward zero."""
    source = self.create_dataframe(
        data={('to_integer_test', 'object'): [inf, '12.342', 0.32, None, 1004.5, -27.7]},
        force_data_types=True,
    )
    actual = source.cols.to_integer(cols=['to_integer_test'])
    expected = self.create_dataframe(
        data={('to_integer_test', 'int32'): [0, 12, 0, 0, 1004, -27]},
        force_data_types=True,
    )
    self.assertTrue(actual.equals(expected, decimal=True, assertion=True))
def test_cols_to_integer_all(self):
    """to_integer over every column ('*'): snapshot shows non-numeric columns as 0, floats truncated, datetimes as epoch nanoseconds."""
    df = self.df.copy()
    result = df.cols.to_integer(cols='*')
    expected = self.create_dataframe(data={('NullType', 'int32'): [0, 0, 0, 0, 0, 0], ('attributes', 'int64'): [0, 0, 0, 0, 0, 0], ('date arrival', 'int32'): [0, 0, 0, 0, 0, 0], ('function(binary)', 'int32'): [0, 0, 0, 0, 0, 0], ('height(ft)', 'int32'): [-28, 17, 26, 13, 0, 300], ('japanese name', 'int64'): [0, 0, 0, 0, 0, 0], ('last date seen', 'int32'): [0, 0, 0, 0, 0, 0], ('last position seen', 'int32'): [0, 0, 0, 0, 0, 0], ('rank', 'int32'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'int32'): [1, 1, 1, 1, 1, 0], ('Date Type', 'int64'): [1473465600000000000, 1439164800000000000, 1403568000000000000, 1372032000000000000, 1336608000000000000, 1302393600000000000], ('age', 'int32'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'int32'): [0, 0, 0, 0, 0, 0], ('names', 'int32'): [0, 0, 0, 0, 0, 0], ('timestamp', 'int64'): [1403568000000000000, 1403568000000000000, 1403568000000000000, 1403568000000000000, 1403568000000000000, 1403568000000000000], ('weight(t)', 'int32'): [4, 2, 4, 1, 5, 0]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_to_integer_multiple(self):
    """to_integer on three named columns into output_cols 'nt'/'wt'/'jn'; snapshot keeps the source columns untouched."""
    df = self.df.copy()
    result = df.cols.to_integer(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
    expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'int32'): [0, 0, 0, 0, 0, 0], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'int64'): [0, 0, 0, 0, 0, 0], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 4.0, 1.8, 5.7, nan], ('wt', 'int32'): [4, 2, 4, 1, 5, 0]}, force_data_types=True)
    self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_to_integer_numeric(self):
    """to_integer on a numeric column yields int32; the missing value maps to 0."""
    source = self.df.copy().cols.select(['height(ft)'])
    converted = source.cols.to_integer(cols=['height(ft)'])
    expected = self.create_dataframe(
        data={('height(ft)', 'int32'): [-28, 17, 26, 13, 0, 300]},
        force_data_types=True,
    )
    self.assertTrue(converted.equals(expected, decimal=True, assertion=True))
def test_cols_to_integer_string(self):
    """to_integer on a non-numeric string column writes 0s into the output column,
    leaving the source column untouched."""
    source = self.df.copy().cols.select(['names'])
    converted = source.cols.to_integer(cols=['names'], output_cols=['names_2'])
    expected = self.create_dataframe(
        data={
            ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
            ('names_2', 'int32'): [0, 0, 0, 0, 0, 0],
        },
        force_data_types=True,
    )
    self.assertTrue(converted.equals(expected, decimal=True, assertion=True))
# z_score applied to every column ('*'): per the expected data, non-numeric
# columns come back as all-NaN float64, constant columns ('age', 'timestamp')
# come back as -inf, and 'Date Type' appears to be compared via its ns-epoch
# value — NOTE(review): confirm these conventions against the z_score docs.
def test_cols_z_score_all(self):
df = self.df.copy()
result = df.cols.z_score(cols='*')
expected = self.create_dataframe(data={('NullType', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'float64'): [nan, nan, nan, nan, nan, nan], ('date arrival', 'float64'): [nan, nan, nan, nan, nan, nan], ('function(binary)', 'float64'): [nan, nan, nan, nan, nan, nan], ('height(ft)', 'float64'): [-28.552839175542417, 16.447160824457583, 25.447160824457583, 12.447160824457582, nan, 299.44716082445757], ('japanese name', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('last position seen', 'float64'): [nan, nan, nan, nan, nan, nan], ('rank', 'float64'): [3.3184689521893898, 0.31846895218938975, 0.31846895218938975, 1.3184689521893898, 3.3184689521893898, 1.3184689521893898], ('Cybertronian', 'float64'): [-1.2360679774997898, -1.2360679774997898, -1.2360679774997898, -1.2360679774997898, -1.2360679774997898, -2.23606797749979], ('Date Type', 'float64'): [1.4734656e+18, 1.4391648e+18, 1.403568e+18, 1.372032e+18, 1.336608e+18, 1.3023936e+18], ('age', 'float64'): [-inf, -inf, -inf, -inf, -inf, -inf], ('function', 'float64'): [nan, nan, nan, nan, nan, nan], ('names', 'float64'): [nan, nan, nan, nan, nan, nan], ('timestamp', 'float64'): [-inf, -inf, -inf, -inf, -inf, -inf], ('weight(t)', 'float64'): [1.8835372346442196, -0.4164627653557802, 1.5835372346442198, -0.6164627653557802, 3.28353723464422, nan]}, force_data_types=True)
# decimal=True: compare float columns with tolerance; assertion=True: raise on mismatch.
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
# z_score on a mixed selection with explicit output columns: the original
# columns are preserved and the z-scores land in 'nt'/'wt'/'jn'. Non-numeric
# inputs ('NullType', 'japanese name') produce all-NaN outputs.
def test_cols_z_score_multiple(self):
df = self.df.copy()
result = df.cols.z_score(cols=['NullType', 'weight(t)', 'japanese name'], output_cols=['nt', 'wt', 'jn'])
expected = self.create_dataframe(data={('NullType', 'object'): [None, None, None, None, None, None], ('nt', 'float64'): [nan, nan, nan, nan, nan, nan], ('attributes', 'object'): [[8.5344, 4300.0], [5.334, 2000.0], [7.9248, 4000.0], [3.9624, 1800.0], [None, 5700.0], [91.44, None]], ('date arrival', 'object'): ['1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10', '1980/04/10'], ('function(binary)', 'object'): [bytearray(b'Leader'), bytearray(b'Espionage'), bytearray(b'Security'), bytearray(b'First Lieutenant'), bytearray(b'None'), bytearray(b'Battle Station')], ('height(ft)', 'float64'): [-28.0, 17.0, 26.0, 13.0, nan, 300.0], ('japanese name', 'object'): [['Inochi', 'Convoy'], ['Bumble', 'Goldback'], ['Roadbuster'], ['Meister'], ['Megatron'], ['Metroflex']], ('jn', 'float64'): [nan, nan, nan, nan, nan, nan], ('last date seen', 'object'): ['2016/09/10', '2015/08/10', '2014/07/10', '2013/06/10', '2012/05/10', '2011/04/10'], ('last position seen', 'object'): ['19.442735,-99.201111', '10.642707,-71.612534', '37.789563,-122.400356', '33.670666,-117.841553', None, None], ('rank', 'int64'): [10, 7, 7, 8, 10, 8], ('Cybertronian', 'bool'): [True, True, True, True, True, False], ('Date Type', 'datetime64[ns]'): [Timestamp('2016-09-10 00:00:00'), Timestamp('2015-08-10 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2013-06-24 00:00:00'), Timestamp('2012-05-10 00:00:00'), Timestamp('2011-04-10 00:00:00')], ('age', 'int64'): [5000000, 5000000, 5000000, 5000000, 5000000, 5000000], ('function', 'object'): ['Leader', 'Espionage', 'Security', 'First Lieutenant', 'None', 'Battle Station'], ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'], ('timestamp', 'datetime64[ns]'): [Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00'), Timestamp('2014-06-24 00:00:00')], ('weight(t)', 'float64'): [4.3, 2.0, 
4.0, 1.8, 5.7, nan], ('wt', 'float64'): [1.8835372346442196, -0.4164627653557802, 1.5835372346442198, -0.6164627653557802, 3.28353723464422, nan]}, force_data_types=True)
# decimal=True: compare float columns with tolerance; assertion=True: raise on mismatch.
self.assertTrue(result.equals(expected, decimal=True, assertion=True))
def test_cols_z_score_numeric(self):
    """z_score on a single numeric column overwrites it in place with float64 scores
    (the NaN input stays NaN)."""
    source = self.df.copy().cols.select(['height(ft)'])
    scored = source.cols.z_score(cols=['height(ft)'])
    expected = self.create_dataframe(
        data={
            ('height(ft)', 'float64'): [
                -28.552839175542417,
                16.447160824457583,
                25.447160824457583,
                12.447160824457582,
                nan,
                299.44716082445757,
            ],
        },
        force_data_types=True,
    )
    self.assertTrue(scored.equals(expected, decimal=True, assertion=True))
def test_cols_z_score_string(self):
    """z_score on a string column produces an all-NaN float64 output column while
    keeping the source column intact."""
    source = self.df.copy().cols.select(['names'])
    scored = source.cols.z_score(cols=['names'], output_cols=['names_2'])
    expected = self.create_dataframe(
        data={
            ('names', 'object'): ['Optimus', 'bumbl#ebéé ', 'ironhide&', 'Jazz', 'Megatron', 'Metroplex_)^$'],
            ('names_2', 'float64'): [nan, nan, nan, nan, nan, nan],
        },
        force_data_types=True,
    )
    self.assertTrue(scored.equals(expected, decimal=True, assertion=True))
class TestNumericDask(TestNumericPandas):
    """Re-run the full numeric test suite on the Dask engine, single partition."""

    config = {'engine': 'dask', 'n_partitions': 1}
class TestNumericPartitionDask(TestNumericPandas):
    """Re-run the full numeric test suite on the Dask engine with two partitions."""

    config = {'engine': 'dask', 'n_partitions': 2}
# Register the cudf (GPU) variant of the suite only when cudf is importable.
try:
    import cudf  # pyright: reportMissingImports=false
except ImportError:
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt,
    # SystemExit and real bugs raised during import.
    pass
else:
    class TestNumericCUDF(TestNumericPandas):
        """Numeric test suite executed on the cudf engine."""

        config = {'engine': 'cudf'}
# Register the dask_cudf variant (single partition) only when dask_cudf imports.
try:
    import dask_cudf  # pyright: reportMissingImports=false
except ImportError:
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt,
    # SystemExit and real bugs raised during import.
    pass
else:
    class TestNumericDC(TestNumericPandas):
        """Numeric test suite executed on the dask_cudf engine, single partition."""

        config = {'engine': 'dask_cudf', 'n_partitions': 1}
# Register the partitioned dask_cudf variant only when dask_cudf imports.
try:
    import dask_cudf  # pyright: reportMissingImports=false
except ImportError:
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt,
    # SystemExit and real bugs raised during import.
    pass
else:
    class TestNumericPartitionDC(TestNumericPandas):
        """Numeric test suite executed on the dask_cudf engine with two partitions."""

        config = {'engine': 'dask_cudf', 'n_partitions': 2}
# Register the Spark variant of the suite only when pyspark is importable.
try:
    import pyspark  # pyright: reportMissingImports=false
except ImportError:
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt,
    # SystemExit and real bugs raised during import.
    pass
else:
    class TestNumericSpark(TestNumericPandas):
        """Numeric test suite executed on the Spark engine."""

        config = {'engine': 'spark'}
# Register the Vaex variant of the suite only when vaex is importable.
try:
    import vaex  # pyright: reportMissingImports=false
except ImportError:
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt,
    # SystemExit and real bugs raised during import.
    pass
else:
    class TestNumericVaex(TestNumericPandas):
        """Numeric test suite executed on the Vaex engine."""

        config = {'engine': 'vaex'}
| 152.07622
| 2,193
| 0.63854
| 14,531
| 99,762
| 4.322277
| 0.035097
| 0.099639
| 0.119079
| 0.119031
| 0.933829
| 0.927381
| 0.912223
| 0.897798
| 0.885729
| 0.881494
| 0
| 0.211284
| 0.121599
| 99,762
| 655
| 2,194
| 152.308397
| 0.505438
| 0.001794
| 0
| 0.442379
| 0
| 0
| 0.273386
| 0.008014
| 0
| 0
| 0
| 0
| 0.180297
| 1
| 0.182156
| false
| 0.009294
| 0.018587
| 0.001859
| 0.236059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b9f3d348f13fced5c83b02cc1caa04da9e8c8e42
| 68,618
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_heteroFair/oldstuff/cmp_bwavesgcccactusADMastar/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_heteroFair/oldstuff/cmp_bwavesgcccactusADMastar/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_heteroFair/oldstuff/cmp_bwavesgcccactusADMastar/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0515867,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.243207,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.28639,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.310493,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.537662,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.308364,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.15652,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.263002,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.89581,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0541052,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0112556,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.100388,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0832422,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.154493,
'Execution Unit/Register Files/Runtime Dynamic': 0.0944978,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.256712,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.667311,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.56691,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00184096,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00184096,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00161805,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000634341,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00119578,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00649575,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0171304,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0800228,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.09014,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.219065,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.271794,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.55947,
'Instruction Fetch Unit/Runtime Dynamic': 0.594508,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0711891,
'L2/Runtime Dynamic': 0.0112287,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.97084,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.32608,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0884423,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0884423,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.39018,
'Load Store Unit/Runtime Dynamic': 1.85069,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.218084,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.436167,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0773986,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0783961,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.316486,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0361246,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.608792,
'Memory Management Unit/Runtime Dynamic': 0.114521,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 23.0871,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.188761,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0181483,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.159326,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.366236,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.5041,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0297105,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.226025,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.180478,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.150276,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.242389,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.12235,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.515015,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.144202,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.41151,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0340961,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00630324,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0558891,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0466163,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0899852,
'Execution Unit/Register Files/Runtime Dynamic': 0.0529195,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.125172,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.331048,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.53038,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0011238,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0011238,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0010197,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000417092,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000669647,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00393695,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0093149,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0448134,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.85052,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.13055,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.152207,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.20738,
'Instruction Fetch Unit/Runtime Dynamic': 0.340822,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0321084,
'L2/Runtime Dynamic': 0.00693796,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.79982,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.761838,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0505569,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.050557,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.03856,
'Load Store Unit/Runtime Dynamic': 1.06173,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.124665,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.24933,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0442439,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0446877,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.177235,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0215156,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.409347,
'Memory Management Unit/Runtime Dynamic': 0.0662033,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.6884,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0896915,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00787155,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0753285,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.172892,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.17896,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0290058,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.225471,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.16406,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.144181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.232558,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.117387,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.494126,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.139748,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.38109,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0309945,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00604758,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0542892,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0447256,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0852837,
'Execution Unit/Register Files/Runtime Dynamic': 0.0507732,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.121625,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.313441,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.48919,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00119206,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00119206,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00107662,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000437741,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000642486,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00410323,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0100599,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0429958,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.7349,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.125588,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.146033,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.08615,
'Instruction Fetch Unit/Runtime Dynamic': 0.32878,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0367448,
'L2/Runtime Dynamic': 0.00559721,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.74347,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.730886,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0487341,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0487341,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.97361,
'Load Store Unit/Runtime Dynamic': 1.01996,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.12017,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.24034,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0426487,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0431495,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.170046,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0207395,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.399418,
'Memory Management Unit/Runtime Dynamic': 0.0638889,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.4665,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0815329,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00749727,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0721468,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.161177,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.06859,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0640755,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.253017,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.398557,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.160797,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.259359,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.130916,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.551071,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.1228,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.73526,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0752959,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00674453,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0706244,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.04988,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.14592,
'Execution Unit/Register Files/Runtime Dynamic': 0.0566245,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.164808,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.431271,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.69736,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000471273,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000471273,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000426619,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000173979,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00071653,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00208569,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00394182,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0479509,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.05008,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.124567,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.162863,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.41663,
'Instruction Fetch Unit/Runtime Dynamic': 0.341408,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0365005,
'L2/Runtime Dynamic': 0.00833412,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.13936,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.923809,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0615421,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0615421,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.42998,
'Load Store Unit/Runtime Dynamic': 1.28886,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.151752,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.303505,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0538574,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0543769,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.189643,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0205058,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.438269,
'Memory Management Unit/Runtime Dynamic': 0.0748827,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.6461,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.19807,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00966517,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.078401,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.286136,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.69698,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 5.023788483430367,
'Runtime Dynamic': 5.023788483430367,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.263912,
'Runtime Dynamic': 0.0723497,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 74.152,
'Peak Power': 107.264,
'Runtime Dynamic': 15.521,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 73.8881,
'Total Cores/Runtime Dynamic': 15.4486,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.263912,
'Total L3s/Runtime Dynamic': 0.0723497,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.074398
| 124
| 0.682124
| 8,082
| 68,618
| 5.785449
| 0.067681
| 0.12353
| 0.112922
| 0.093417
| 0.939454
| 0.932161
| 0.918174
| 0.887057
| 0.862826
| 0.842273
| 0
| 0.132095
| 0.2243
| 68,618
| 914
| 125
| 75.074398
| 0.746369
| 0
| 0
| 0.642232
| 0
| 0
| 0.657325
| 0.048092
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbdfd2bd89676fea7eb6feea87dbd97c2b61e41b
| 56,453
|
py
|
Python
|
venv/lib/python3.8/site-packages/mogp_emulator/tests/test_Kernel.py
|
AndrewKirby2/data_synthesis
|
656858137a348fd5dcb57bcd04bdfece2b9eac1b
|
[
"MIT"
] | 21
|
2021-01-20T07:02:12.000Z
|
2022-03-30T21:09:04.000Z
|
venv/lib/python3.8/site-packages/mogp_emulator/tests/test_Kernel.py
|
AndrewKirby2/data_synthesis
|
656858137a348fd5dcb57bcd04bdfece2b9eac1b
|
[
"MIT"
] | 114
|
2019-04-25T14:53:11.000Z
|
2021-01-06T17:07:41.000Z
|
venv/lib/python3.8/site-packages/mogp_emulator/tests/test_Kernel.py
|
AndrewKirby2/data_synthesis
|
656858137a348fd5dcb57bcd04bdfece2b9eac1b
|
[
"MIT"
] | 8
|
2021-02-02T08:56:12.000Z
|
2022-02-15T10:03:15.000Z
|
import numpy as np
import pytest
from numpy.testing import assert_allclose
from ..Kernel import Kernel, SquaredExponential, Matern52
def test_calc_r():
    """Test the calc_r distance computation shared by all kernels.

    Covers 1D inputs with unit length scales, 2D inputs, 2D inputs with
    non-unit length scales (parameters are stored on a log scale, as the
    np.log(...) params below demonstrate), and 1D arrays that the method
    must promote to 2D.
    """
    k = Kernel()
    # 1D inputs, unit length scales (log-params of 0 -> scale 1)
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    assert_allclose(k.calc_r(x, y, params), np.array([[1., 2.], [0., 1.]]))
    # 2D inputs, unit length scales
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    assert_allclose(k.calc_r(x, y, params),
                    np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]]))
    # 2D inputs with non-unit length scales exp(params)
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), 0.])
    assert_allclose(k.calc_r(x, y, params),
                    np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                              [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]]))
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    assert_allclose(k.calc_r(x, y, params), np.array([[1., 2.], [0., 1.]]))
def test_calc_r_failures():
    """Test scenarios where calc_r should raise an exception.

    Malformed parameter arrays and mismatched/over-ranked inputs must
    raise AssertionError; an overflowing log length scale must raise
    FloatingPointError.
    """
    k = Kernel()
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    # too few parameters (D inputs need D+1 parameters)
    params = np.array([0.])
    with pytest.raises(AssertionError):
        k.calc_r(x, y, params)
    # params must be a 1D array
    params = np.array([[0., 0.], [0., 0.]])
    with pytest.raises(AssertionError):
        k.calc_r(x, y, params)
    # y has more input dimensions than x
    x = np.array([[1.], [2.]])
    y = np.array([[2., 4.], [3., 2.]])
    params = np.array([0., 0.])
    with pytest.raises(AssertionError):
        k.calc_r(x, y, params)
    # y has too many array dimensions
    x = np.array([[1.], [2.]])
    y = np.array([[[2.], [4.]], [[3.], [2.]]])
    params = np.array([0., 0.])
    with pytest.raises(AssertionError):
        k.calc_r(x, y, params)
    # x has more input dimensions than y
    x = np.array([[2., 4.], [3., 2.]])
    y = np.array([[1.], [2.]])
    params = np.array([0., 0.])
    with pytest.raises(AssertionError):
        k.calc_r(x, y, params)
    # x has too many array dimensions
    x = np.array([[[2.], [4.]], [[3.], [2.]]])
    y = np.array([[1.], [2.]])
    params = np.array([0., 0.])
    with pytest.raises(AssertionError):
        k.calc_r(x, y, params)
    # exp(800) overflows -> FloatingPointError
    with pytest.raises(FloatingPointError):
        k.calc_r(y, y, np.array([800., 0.]))
def test_calc_drdtheta():
    """Test calc_drdtheta (gradient of r w.r.t. the log length-scale
    parameters) against both the analytic result and finite differences.
    """
    k = Kernel()
    dx = 1.e-6  # finite-difference step
    # 1D inputs, unit length scales
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    # r[1, 0] is really 0; 1. is a placeholder to avoid 0/0 in the
    # analytic formula below (the numerator is zero there anyway)
    r = np.array([[1., 2.], [1., 1.]])
    deriv = np.zeros((1, 2, 2))
    deriv[0] = 0.5*np.array([[1., 4.], [0., 1.]])/r
    deriv_fd = np.zeros((1, 2, 2))
    deriv_fd[0] = (k.calc_r(x, y, params) -
                   k.calc_r(x, y, params - np.array([dx, 0.])))/dx
    assert_allclose(k.calc_drdtheta(x, y, params), deriv)
    assert_allclose(k.calc_drdtheta(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, unit length scales
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    r = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    deriv = np.zeros((2, 2, 2))
    deriv[0] = 0.5*np.array([[1., 4.], [0., 1.]])/r
    deriv[1] = 0.5*np.array([[4., 1.], [1., 4.]])/r
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.calc_r(x, y, params) -
                   k.calc_r(x, y, params - np.array([dx, 0., 0.])))/dx
    deriv_fd[1] = (k.calc_r(x, y, params) -
                   k.calc_r(x, y, params - np.array([0., dx, 0.])))/dx
    assert_allclose(k.calc_drdtheta(x, y, params), deriv)
    assert_allclose(k.calc_drdtheta(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, non-unit length scales exp(params)
    deriv = np.zeros((2, 2, 2))
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), 0.])
    r = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    deriv = np.zeros((2, 2, 2))
    deriv[0] = 0.5*2.*np.array([[1., 4.], [0., 1.]])/r
    deriv[1] = 0.5*4.*np.array([[4., 1.], [1., 4.]])/r
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.calc_r(x, y, params) -
                   k.calc_r(x, y, params - np.array([dx, 0., 0.])))/dx
    deriv_fd[1] = (k.calc_r(x, y, params) -
                   k.calc_r(x, y, params - np.array([0., dx, 0.])))/dx
    assert_allclose(k.calc_drdtheta(x, y, params), deriv)
    assert_allclose(k.calc_drdtheta(x, y, params), deriv_fd, rtol = 1.e-5)
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    r = np.array([[1., 2.], [1., 1.]])
    deriv = np.zeros((1, 2, 2))
    deriv[0] = 0.5*np.array([[1., 4.], [0., 1.]])/r
    deriv_fd = np.zeros((1, 2, 2))
    deriv_fd[0] = (k.calc_r(x, y, params) -
                   k.calc_r(x, y, params - np.array([dx, 0.])))/dx
    assert_allclose(k.calc_drdtheta(x, y, params), deriv)
    assert_allclose(k.calc_drdtheta(x, y, params), deriv_fd, rtol = 1.e-5)
def test_calc_drdtheta_failures():
    """Test situations where calc_drdtheta should fail.

    Every malformed (x, y, params) combination below must raise an
    AssertionError: bad parameter shape/length, mismatched input
    dimensions, and arrays with too many dimensions.
    """
    kern = Kernel()
    col = np.array([[1.], [2.]])
    two_col = np.array([[2., 4.], [3., 2.]])
    three_d = np.array([[[2.], [4.]], [[3.], [2.]]])
    ok_params = np.array([0., 0.])
    bad_cases = [
        # too few parameters
        (col, np.array([[2.], [3.]]), np.array([0.])),
        # params is not a 1D array
        (col, np.array([[2.], [3.]]), np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions (y wider than x)
        (col, two_col, ok_params),
        # y has too many array dimensions
        (col, three_d, ok_params),
        # mismatched input dimensions (x wider than y)
        (two_col, col, ok_params),
        # x has too many array dimensions
        (three_d, col, ok_params),
    ]
    for bad_x, bad_y, bad_params in bad_cases:
        with pytest.raises(AssertionError):
            kern.calc_drdtheta(bad_x, bad_y, bad_params)
def test_calc_d2rdtheta2():
    """Test calc_d2rdtheta2 (hessian of r w.r.t. the log length-scale
    parameters) against both the analytic result and finite differences
    of calc_drdtheta.
    """
    k = Kernel()
    dx = 1.e-6  # finite-difference step
    # 1D inputs, unit length scales
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    # r[1, 0] placeholder of 1. avoids 0/0 (numerator zero there anyway)
    r = np.array([[1., 2.], [1., 1.]])
    deriv = np.zeros((1, 1, 2, 2))
    deriv[0, 0] = (0.5*np.array([[1., 4.], [0., 1.]])/r -
                   0.25*np.array([[1., 4.], [0., 1.]])**2/r**3)
    deriv_fd = np.zeros((1, 1, 2, 2))
    deriv_fd[0, 0] = (k.calc_drdtheta(x, y, params)[0] -
                      k.calc_drdtheta(x, y, params - np.array([dx, 0.]))[0])/dx
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv)
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, unit length scales
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    r = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    deriv = np.zeros((2, 2, 2, 2))
    # squared coordinate differences along each input dimension
    x12 = np.array([[1., 4.], [0., 1.]])
    x22 = np.array([[4., 1.], [1., 4.]])
    deriv[0, 0] = 0.5*x12/r-0.25*x12*x12/r**3
    deriv[0, 1] = -0.25*x12*x22/r**3
    deriv[1, 0] = -0.25*x22*x12/r**3
    deriv[1, 1] = 0.5*x22/r-0.25*x22*x22/r**3
    deriv_fd = np.zeros((2, 2, 2, 2))
    deriv_fd[0, 0] = (k.calc_drdtheta(x, y, params)[0] -
                      k.calc_drdtheta(x, y, params - np.array([dx, 0., 0.]))[0])/dx
    deriv_fd[0, 1] = (k.calc_drdtheta(x, y, params)[1] -
                      k.calc_drdtheta(x, y, params - np.array([dx, 0., 0.]))[1])/dx
    deriv_fd[1, 0] = (k.calc_drdtheta(x, y, params)[0] -
                      k.calc_drdtheta(x, y, params - np.array([0., dx, 0.]))[0])/dx
    deriv_fd[1, 1] = (k.calc_drdtheta(x, y, params)[1] -
                      k.calc_drdtheta(x, y, params - np.array([0., dx, 0.]))[1])/dx
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv)
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, non-unit length scales exp(params)
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), 0.])
    r = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    deriv = np.zeros((2, 2, 2, 2))
    x12 = np.array([[1., 4.], [0., 1.]])
    x22 = np.array([[4., 1.], [1., 4.]])
    deriv[0, 0] = 2.*0.5*x12/r-2.*2.*0.25*x12**2/r**3
    deriv[0, 1] = -2.*4.*0.25*x12*x22/r**3
    deriv[1, 0] = -4.*2.*0.25*x22*x12/r**3
    deriv[1, 1] = 4.*0.5*x22/r-4.*4.*0.25*x22*x22/r**3
    deriv_fd = np.zeros((2, 2, 2, 2))
    deriv_fd[0, 0] = (k.calc_drdtheta(x, y, params)[0] -
                      k.calc_drdtheta(x, y, params - np.array([dx, 0., 0.]))[0])/dx
    deriv_fd[0, 1] = (k.calc_drdtheta(x, y, params)[1] -
                      k.calc_drdtheta(x, y, params - np.array([dx, 0., 0.]))[1])/dx
    deriv_fd[1, 0] = (k.calc_drdtheta(x, y, params)[0] -
                      k.calc_drdtheta(x, y, params - np.array([0., dx, 0.]))[0])/dx
    deriv_fd[1, 1] = (k.calc_drdtheta(x, y, params)[1] -
                      k.calc_drdtheta(x, y, params - np.array([0., dx, 0.]))[1])/dx
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv)
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv_fd, rtol = 1.e-5)
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    r = np.array([[1., 2.], [1., 1.]])
    deriv = np.zeros((1, 1, 2, 2))
    deriv[0, 0] = (0.5*np.array([[1., 4.], [0., 1.]])/r -
                   0.25*np.array([[1., 4.], [0., 1.]])**2/r**3)
    deriv_fd = np.zeros((1, 1, 2, 2))
    deriv_fd[0, 0] = (k.calc_drdtheta(x, y, params)[0] -
                      k.calc_drdtheta(x, y, params - np.array([dx, 0.]))[0])/dx
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv)
    assert_allclose(k.calc_d2rdtheta2(x, y, params), deriv_fd, rtol = 1.e-5)
def test_calc_d2rdtheta2_failures():
    """Test situations where calc_d2rdtheta2 should fail.

    Each malformed (x, y, params) combination must raise AssertionError.
    """
    kern = Kernel()
    col = np.array([[1.], [2.]])
    two_col = np.array([[2., 4.], [3., 2.]])
    three_d = np.array([[[2.], [4.]], [[3.], [2.]]])
    ok_params = np.array([0., 0.])
    bad_cases = [
        # too few parameters
        (col, np.array([[2.], [3.]]), np.array([0.])),
        # params is not a 1D array
        (col, np.array([[2.], [3.]]), np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions (y wider than x)
        (col, two_col, ok_params),
        # y has too many array dimensions
        (col, three_d, ok_params),
        # mismatched input dimensions (x wider than y)
        (two_col, col, ok_params),
        # x has too many array dimensions
        (three_d, col, ok_params),
    ]
    for bad_x, bad_y, bad_params in bad_cases:
        with pytest.raises(AssertionError):
            kern.calc_d2rdtheta2(bad_x, bad_y, bad_params)
def test_kernel_calc_drdx():
    """Test the calc_drdx method (gradient of r w.r.t. the first input)
    against the analytic result and central finite differences.
    """
    k = Kernel()
    dx = 1.e-6  # finite-difference step
    # 1D inputs, unit length scales
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    # r[1, 0] placeholder of 1. avoids 0/0 (numerator zero there anyway)
    r = np.array([[1., 2.], [1., 1.]])
    deriv = np.zeros((1, 2, 2))
    deriv[0] = -np.array([[1., 2.], [0., 1.]])/r
    deriv_fd = np.zeros((1, 2, 2))
    # need to use central differences here as derivative is discontiuous at zero
    deriv_fd[0] = (k.calc_r(x + dx, y, params) - k.calc_r(x - dx, y, params))/dx/2.
    assert_allclose(k.calc_drdx(x, y, params), deriv)
    assert_allclose(k.calc_drdx(x, y, params), deriv_fd, rtol = 1.e-5, atol = 1.e-8)
    # 2D inputs, unit length scales
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    r = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    deriv = np.zeros((2, 2, 2))
    deriv[0] = -np.array([[1., 2.], [0., 1.]])/r
    deriv[1] = np.array([[-2., 1.], [-1., 2.]])/r
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.calc_r(x + np.array([[dx, 0.], [dx, 0.]]), y, params) -
                   k.calc_r(x - np.array([[dx, 0.], [dx, 0.]]), y, params))/dx/2.
    deriv_fd[1] = (k.calc_r(x + np.array([[0., dx], [0., dx]]), y, params) -
                   k.calc_r(x - np.array([[0., dx], [0., dx]]), y, params))/dx/2.
    assert_allclose(k.calc_drdx(x, y, params), deriv)
    assert_allclose(k.calc_drdx(x, y, params), deriv_fd, rtol = 1.e-5, atol = 1.e-7)
    # 2D inputs, non-unit length scales exp(params)
    deriv = np.zeros((2, 2, 2))
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), 0.])
    r = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    deriv = np.zeros((2, 2, 2))
    deriv[0] = 2.*np.array([[-1., -2.], [0., -1.]])/r
    deriv[1] = 4.*np.array([[-2., 1.], [-1., 2.]])/r
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.calc_r(x + np.array([[dx, 0.], [dx, 0.]]), y, params) -
                   k.calc_r(x - np.array([[dx, 0.], [dx, 0.]]), y, params))/dx/2.
    deriv_fd[1] = (k.calc_r(x + np.array([[0., dx], [0., dx]]), y, params) -
                   k.calc_r(x - np.array([[0., dx], [0., dx]]), y, params))/dx/2.
    assert_allclose(k.calc_drdx(x, y, params), deriv)
    assert_allclose(k.calc_drdx(x, y, params), deriv_fd, rtol = 1.e-5, atol = 1.e-7)
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    r = np.array([[1., 2.], [1., 1.]])
    deriv = np.zeros((1, 2, 2))
    deriv[0] = -np.array([[1., 2.], [0., 1.]])/r
    deriv_fd = np.zeros((1, 2, 2))
    # need to use central differences here as derivative is discontiuous at zero
    deriv_fd[0] = (k.calc_r(x + dx, y, params) - k.calc_r(x - dx, y, params))/dx/2.
    assert_allclose(k.calc_drdx(x, y, params), deriv)
    assert_allclose(k.calc_drdx(x, y, params), deriv_fd, rtol = 1.e-5, atol = 1.e-8)
def test_kernel_calc_drdx_failures():
    """Test situations where calc_drdx should fail.

    Each malformed (x, y, params) combination must raise AssertionError.
    """
    kern = Kernel()
    col = np.array([[1.], [2.]])
    two_col = np.array([[2., 4.], [3., 2.]])
    three_d = np.array([[[2.], [4.]], [[3.], [2.]]])
    ok_params = np.array([0., 0.])
    bad_cases = [
        # too few parameters
        (col, np.array([[2.], [3.]]), np.array([0.])),
        # params is not a 1D array
        (col, np.array([[2.], [3.]]), np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions (y wider than x)
        (col, two_col, ok_params),
        # y has too many array dimensions
        (col, three_d, ok_params),
        # mismatched input dimensions (x wider than y)
        (two_col, col, ok_params),
        # x has too many array dimensions
        (three_d, col, ok_params),
    ]
    for bad_x, bad_y, bad_params in bad_cases:
        with pytest.raises(AssertionError):
            kern.calc_drdx(bad_x, bad_y, bad_params)
def test_squared_exponential_K():
    """Test the squared exponential K(r) = exp(-r**2/2) for scalar and
    array distances; negative distances must raise AssertionError."""
    kern = SquaredExponential()
    assert_allclose(kern.calc_K(1.), np.exp(-0.5))
    r = np.array([[1., 2.], [3., 4.]])
    assert_allclose(kern.calc_K(r), np.exp(-0.5*r**2))
    with pytest.raises(AssertionError):
        kern.calc_K(-1.)
def test_squared_exponential_dKdr():
    """Test squared exponential dK/dr = -r*exp(-r**2/2) against the
    analytic value and a finite-difference estimate."""
    k = SquaredExponential()
    dx = 1.e-6  # finite-difference step
    assert_allclose(k.calc_dKdr(1.), -np.exp(-0.5))
    assert_allclose(k.calc_dKdr(1.), (k.calc_K(1.)-k.calc_K(1.-dx))/dx, rtol = 1.e-5)
    r = np.array([[1., 2.], [3., 4.]])
    assert_allclose(k.calc_dKdr(r), -r*np.exp(-0.5*r**2))
    assert_allclose(k.calc_dKdr(r), (k.calc_K(r)-k.calc_K(r-dx))/dx, rtol = 1.e-5)
    # negative distances are invalid
    with pytest.raises(AssertionError):
        k.calc_dKdr(-1.)
def test_squared_exponential_d2Kdr2():
    """Test squared exponential d2K/dr2 = (r**2-1)*exp(-r**2/2) against
    the analytic value and a finite-difference estimate."""
    k = SquaredExponential()
    dx = 1.e-6  # finite-difference step
    # the second derivative vanishes exactly at r = 1
    assert_allclose(k.calc_d2Kdr2(1.), 0.)
    assert_allclose(k.calc_d2Kdr2(1.),
                    (k.calc_dKdr(1.)-k.calc_dKdr(1.-dx))/dx, atol = 1.e-5)
    r = np.array([[1., 2.], [3., 4.]])
    assert_allclose(k.calc_d2Kdr2(r), (r**2 - 1.)*np.exp(-0.5*r**2))
    assert_allclose(k.calc_d2Kdr2(r),
                    (k.calc_dKdr(r)-k.calc_dKdr(r-dx))/dx, rtol = 1.e-5, atol = 1.e-5)
    # negative distances are invalid
    with pytest.raises(AssertionError):
        k.calc_d2Kdr2(-1.)
def test_squared_exponential():
    """Test the full squared exponential covariance kernel_f for 1D and
    2D inputs, with and without non-unit hyperparameters (the last
    parameter is a log covariance scale, as the factor of 2 below shows).
    """
    k = SquaredExponential()
    # 1D inputs, unit hyperparameters
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    assert_allclose(k.kernel_f(x, y, params),
                    np.exp(-0.5*np.array([[1., 2.], [0., 1.]])**2))
    # 2D inputs, unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    assert_allclose(k.kernel_f(x, y, params),
                    np.exp(-0.5*np.array([[np.sqrt(5.), np.sqrt(5.)],
                                          [1., np.sqrt(5.)]])**2))
    # 2D inputs, non-unit length scales and covariance scale of 2
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    assert_allclose(k.kernel_f(x, y, params),
                    2.*np.exp(-0.5*np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                                             [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])**2))
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    assert_allclose(k.kernel_f(x, y, params),
                    np.exp(-0.5*np.array([[1., 2.], [0., 1.]])**2))
def test_squared_exponential_failures():
    """Test scenarios where the squared exponential kernel_f should
    raise an exception for malformed parameter arrays."""
    kern = SquaredExponential()
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    # too few parameters, then a params array of the wrong rank
    for bad_params in (np.array([0.]), np.array([[0., 0.], [0., 0.]])):
        with pytest.raises(AssertionError):
            kern.kernel_f(x, y, bad_params)
def test_squared_exponential_deriv():
    """Test the gradient of the squared exponential kernel w.r.t. its
    hyperparameters against analytic values and finite differences.

    The last gradient component (deriv[-1]) is w.r.t. the log covariance
    scale and equals the kernel value itself.
    """
    k = SquaredExponential()
    dx = 1.e-6  # finite-difference step
    # 1D inputs, unit hyperparameters
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    deriv = np.zeros((2, 2, 2))
    deriv[-1] = np.exp(-0.5*np.array([[1., 2.], [0., 1.]])**2)
    deriv[0] = (-0.5*np.array([[1., 4.], [0., 1.]])*
                np.exp(-0.5*np.array([[1., 2.], [0., 1.]])**2))
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    deriv = np.zeros((3, 2, 2))
    deriv[-1] = np.exp(-0.5*np.array([[np.sqrt(5.), np.sqrt(5.)],
                                      [1., np.sqrt(5.)]])**2)
    deriv[0] = -0.5*np.array([[1., 4.], [0., 1.]])*deriv[-1]
    deriv[1] = -0.5*np.array([[4., 1.], [1., 4.]])*deriv[-1]
    deriv_fd = np.zeros((3, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0., 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx, 0.])))/dx
    deriv_fd[2] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., 0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, non-unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    deriv = np.zeros((3, 2, 2))
    deriv[-1] = 2.*np.exp(-0.5*np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                                         [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])**2)
    deriv[0] = -0.5*np.array([[2., 8.], [0., 2.]])*deriv[-1]
    deriv[1] = -0.5*np.array([[16., 4.], [4., 16.]])*deriv[-1]
    deriv_fd = np.zeros((3, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0., 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx, 0.])))/dx
    deriv_fd[2] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., 0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    deriv = np.zeros((2, 2, 2))
    deriv[-1] = np.exp(-0.5*np.array([[1., 2.], [0., 1.]])**2)
    deriv[0] = (-0.5*np.array([[1., 4.],[0., 1.]])*
                np.exp(-0.5*np.array([[1., 2.], [0., 1.]])**2))
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
def test_squared_exponential_deriv_failures():
    """Test scenarios where kernel_deriv of the squared exponential
    should raise an exception for malformed inputs."""
    kern = SquaredExponential()
    col = np.array([[1.], [2.]])
    two_col = np.array([[2., 4.], [3., 2.]])
    three_d = np.array([[[2.], [4.]], [[3.], [2.]]])
    ok_params = np.array([0., 0.])
    bad_cases = [
        # too few parameters
        (col, np.array([[2.], [3.]]), np.array([0.])),
        # params is not a 1D array
        (col, np.array([[2.], [3.]]), np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions (y wider than x)
        (col, two_col, ok_params),
        # y has too many array dimensions
        (col, three_d, ok_params),
        # mismatched input dimensions (x wider than y)
        (two_col, col, ok_params),
        # x has too many array dimensions
        (three_d, col, ok_params),
    ]
    for bad_x, bad_y, bad_params in bad_cases:
        with pytest.raises(AssertionError):
            kern.kernel_deriv(bad_x, bad_y, bad_params)
def test_squared_exponential_hessian():
    """Test kernel_hessian of the squared exponential against analytic
    values and finite differences of kernel_deriv."""
    k = SquaredExponential()
    dx = 1.e-6  # finite-difference step
    # 1D inputs, unit hyperparameters
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    hess = np.zeros((2, 2, 2, 2))
    # squared pairwise distances
    r2 = np.array([[1., 4.], [0., 1.]])
    hess[0, 0] = (-0.5*r2+0.25*r2**2)*np.exp(-0.5*r2)
    hess[0, 1] = -0.5*np.exp(-0.5*r2)*r2
    hess[1, 0] = -0.5*np.exp(-0.5*r2)*r2
    hess[1, 1] = np.exp(-0.5*r2)
    hess_fd = np.zeros((2, 2, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[1])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[0])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[1])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, atol = 1.e-5)
    # 2D inputs, unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    hess = np.zeros((3, 3, 2, 2))
    r2 = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])**2
    # squared coordinate differences along each input dimension
    x12 = np.array([[1., 4.],[0., 1.]])
    x22 = np.array([[4., 1.],[1., 4.]])
    hess[0, 0] = (-0.5*x12+0.25*x12**2)*np.exp(-0.5*r2)
    hess[0, 1] = 0.25*np.exp(-0.5*r2)*x12*x22
    hess[1, 0] = 0.25*np.exp(-0.5*r2)*x12*x22
    hess[1, 1] = (-0.5*x22+0.25*x22**2)*np.exp(-0.5*r2)
    hess[0, 2] = -0.5*np.exp(-0.5*r2)*x12
    hess[2, 0] = -0.5*np.exp(-0.5*r2)*x12
    hess[1, 2] = -0.5*np.exp(-0.5*r2)*x22
    hess[2, 1] = -0.5*np.exp(-0.5*r2)*x22
    hess[2, 2] = np.exp(-0.5*r2)
    hess_fd = np.zeros((3, 3, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[1])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[0])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[1])/dx
    hess_fd[0, 2] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[0])/dx
    hess_fd[2, 0] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[2])/dx
    hess_fd[2, 1] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[2])/dx
    hess_fd[1, 2] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[1])/dx
    hess_fd[2, 2] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[2])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, atol = 1.e-5)
    # 2D inputs, non-unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    hess = np.zeros((3, 3, 2, 2))
    r2 = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                   [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])**2
    x12 = np.array([[1., 4.],[0., 1.]])
    x22 = np.array([[4., 1.],[1., 4.]])
    hess[0, 0] = (-0.5*x12+0.25*x12**2)*2.*2.*2.*np.exp(-0.5*r2)
    hess[0, 1] = 0.25*2.*4.*2.*np.exp(-0.5*r2)*x12*x22
    hess[1, 0] = 0.25*np.exp(-0.5*r2)*x12*x22
    hess[1, 1] = (-0.5*x22+0.25*x22**2)*np.exp(-0.5*r2)
    hess[0, 2] = -0.5*np.exp(-0.5*r2)*x12
    hess[2, 0] = -0.5*np.exp(-0.5*r2)*x12
    hess[1, 2] = -0.5*2.*4.*np.exp(-0.5*r2)*x22
    hess[2, 1] = -0.5*2.*4.*np.exp(-0.5*r2)*x22
    hess[2, 2] = 2.*np.exp(-0.5*r2)
    hess_fd = np.zeros((3, 3, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[1])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[0])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[1])/dx
    hess_fd[0, 2] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[0])/dx
    hess_fd[2, 0] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[2])/dx
    hess_fd[2, 1] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[2])/dx
    hess_fd[1, 2] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[1])/dx
    hess_fd[2, 2] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[2])/dx
    # NOTE(review): unlike the other cases, only element [1, 2] of the
    # analytic hessian is compared here, and several entries above (e.g.
    # hess[1, 0], hess[1, 1], hess[0, 2]) lack the exp(param) scale
    # factors present in their transposed counterparts — confirm whether
    # the partial comparison is intentional or the analytic values need
    # fixing so the full array can be checked.
    assert_allclose(k.kernel_hessian(x, y, params)[1,2], hess[1,2])
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, atol = 1.e-5)
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    hess = np.zeros((2, 2, 2, 2))
    r2 = np.array([[1., 4.], [0., 1.]])
    hess[0, 0] = (-0.5*r2+0.25*r2**2)*np.exp(-0.5*r2)
    hess[0, 1] = -0.5*np.exp(-0.5*r2)*r2
    hess[1, 0] = -0.5*np.exp(-0.5*r2)*r2
    hess[1, 1] = np.exp(-0.5*r2)
    hess_fd = np.zeros((2, 2, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[1])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[0])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[1])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, atol = 1.e-5)
def test_squared_exponential_hessian_failures():
    """Test situations where kernel_hessian of the squared exponential
    should fail for malformed inputs."""
    kern = SquaredExponential()
    col = np.array([[1.], [2.]])
    two_col = np.array([[2., 4.], [3., 2.]])
    three_d = np.array([[[2.], [4.]], [[3.], [2.]]])
    ok_params = np.array([0., 0.])
    bad_cases = [
        # too few parameters
        (col, np.array([[2.], [3.]]), np.array([0.])),
        # params is not a 1D array
        (col, np.array([[2.], [3.]]), np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions (y wider than x)
        (col, two_col, ok_params),
        # y has too many array dimensions
        (col, three_d, ok_params),
        # mismatched input dimensions (x wider than y)
        (two_col, col, ok_params),
        # x has too many array dimensions
        (three_d, col, ok_params),
    ]
    for bad_x, bad_y, bad_params in bad_cases:
        with pytest.raises(AssertionError):
            kern.kernel_hessian(bad_x, bad_y, bad_params)
def test_squared_exponential_inputderiv():
    """Test kernel_inputderiv (gradient of the squared exponential
    kernel w.r.t. the first input) against analytic values and central
    finite differences."""
    k = SquaredExponential()
    dx = 1.e-6  # finite-difference step
    # 1D inputs, unit hyperparameters
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    deriv = np.zeros((1, 2, 2))
    r = np.array([[1., 2.], [0., 1.]])
    deriv[0] = -r*np.exp(-0.5*r**2)*np.array([[-1., -1.], [0., -1.]])
    deriv_fd = np.zeros((1, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + dx, y, params) -
                   k.kernel_f(x - dx, y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    deriv = np.zeros((2, 2, 2))
    r = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    deriv[0] = -np.exp(-0.5*r**2)*np.array([[-1., -2.], [ 0., -1.]])
    deriv[1] = -np.exp(-0.5*r**2)*np.array([[-2., 1.], [-1., 2.]])
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + np.array([[dx, 0.], [dx, 0.]]), y, params) -
                   k.kernel_f(x - np.array([[dx, 0.], [dx, 0.]]), y, params))/dx/2.
    deriv_fd[1] = (k.kernel_f(x + np.array([[0., dx], [0., dx]]), y, params) -
                   k.kernel_f(x - np.array([[0., dx], [0., dx]]), y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2D inputs, non-unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    deriv = np.zeros((2, 2, 2))
    r = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    deriv[0] = (-2.*2.*np.exp(-0.5*r**2)*
                np.array([[-1., -2.], [0., -1.]]))
    deriv[1] = (-2.*4.*np.exp(-0.5*r**2)*
                np.array([[-2., 1.], [-1., 2.]]))
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + np.array([[dx, 0.], [dx, 0.]]), y, params) -
                   k.kernel_f(x - np.array([[dx, 0.], [dx, 0.]]), y, params))/dx/2.
    deriv_fd[1] = (k.kernel_f(x + np.array([[0., dx], [0., dx]]), y, params) -
                   k.kernel_f(x - np.array([[0., dx], [0., dx]]), y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    deriv = np.zeros((1, 2, 2))
    r = np.array([[1., 2.], [0., 1.]])
    deriv[0] = -r*np.exp(-0.5*r**2)*np.array([[-1., -1.], [0., -1.]])
    deriv_fd = np.zeros((1, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + dx, y, params)-k.kernel_f(x - dx, y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd, rtol = 1.e-5)
def test_squared_exponential_inputderiv_failures():
    """Test situations where kernel_inputderiv of the squared
    exponential should fail for malformed inputs."""
    kern = SquaredExponential()
    col = np.array([[1.], [2.]])
    two_col = np.array([[2., 4.], [3., 2.]])
    three_d = np.array([[[2.], [4.]], [[3.], [2.]]])
    ok_params = np.array([0., 0.])
    bad_cases = [
        # too few parameters
        (col, np.array([[2.], [3.]]), np.array([0.])),
        # params is not a 1D array
        (col, np.array([[2.], [3.]]), np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions (y wider than x)
        (col, two_col, ok_params),
        # y has too many array dimensions
        (col, three_d, ok_params),
        # mismatched input dimensions (x wider than y)
        (two_col, col, ok_params),
        # x has too many array dimensions
        (three_d, col, ok_params),
    ]
    for bad_x, bad_y, bad_params in bad_cases:
        with pytest.raises(AssertionError):
            kern.kernel_inputderiv(bad_x, bad_y, bad_params)
def test_matern_5_2_K():
    """Test the Matern 5/2 K(r) = (1 + sqrt(5)r + 5r**2/3)*exp(-sqrt(5)r)
    for scalar and array distances; negative distances must raise."""
    kern = Matern52()
    assert_allclose(kern.calc_K(1.), (1.+np.sqrt(5.)+5./3.)*np.exp(-np.sqrt(5.)))
    r = np.array([[1., 2.], [3., 4.]])
    expected = (1.+np.sqrt(5.)*r+5./3.*r**2)*np.exp(-np.sqrt(5.)*r)
    assert_allclose(kern.calc_K(r), expected)
    with pytest.raises(AssertionError):
        kern.calc_K(-1.)
def test_matern_5_2_dKdr():
    """Test Matern 5/2 dK/dr = -5r/3*(1+sqrt(5)r)*exp(-sqrt(5)r) against
    the analytic value and a finite-difference estimate."""
    k = Matern52()
    dx = 1.e-6  # finite-difference step
    assert_allclose(k.calc_dKdr(1.), -5./3.*(1.+np.sqrt(5.))*np.exp(-np.sqrt(5.)))
    assert_allclose(k.calc_dKdr(1.), (k.calc_K(1.)-k.calc_K(1.-dx))/dx, rtol = 1.e-5)
    r = np.array([[1., 2.], [3., 4.]])
    assert_allclose(k.calc_dKdr(r), -5./3.*r*(1.+np.sqrt(5.)*r)*np.exp(-np.sqrt(5.)*r))
    assert_allclose(k.calc_dKdr(r), (k.calc_K(r)-k.calc_K(r - dx))/dx, rtol = 1.e-5)
    # negative distances are invalid
    with pytest.raises(AssertionError):
        k.calc_dKdr(-1.)
def test_matern_5_2_d2Kdr2():
    """Test Matern 5/2 d2K/dr2 = 5/3*(5r**2 - sqrt(5)r - 1)*exp(-sqrt(5)r)
    against the analytic value and a finite-difference estimate.

    (The original docstring said "squared exponential"; this function
    exercises Matern52.)
    """
    k = Matern52()
    dx = 1.e-6  # finite-difference step
    assert_allclose(k.calc_d2Kdr2(1.), 5./3.*(5.-np.sqrt(5.)-1.)*np.exp(-np.sqrt(5.)))
    assert_allclose(k.calc_d2Kdr2(1.), (k.calc_dKdr(1.)-k.calc_dKdr(1.-dx))/dx, rtol = 1.e-5)
    r = np.array([[1., 2.], [3., 4.]])
    assert_allclose(k.calc_d2Kdr2(r), 5./3.*(5.*r**2-np.sqrt(5.)*r-1.)*np.exp(-np.sqrt(5.)*r))
    assert_allclose(k.calc_d2Kdr2(r), (k.calc_dKdr(r)-k.calc_dKdr(r - dx))/dx, rtol = 1.e-5)
    # negative distances are invalid
    with pytest.raises(AssertionError):
        k.calc_d2Kdr2(-1.)
def test_matern_5_2():
    """Test the full Matern 5/2 covariance kernel_f for 1D and 2D
    inputs, with and without non-unit hyperparameters (the last
    parameter is a log covariance scale, as the factor of 2 below shows).
    """
    k = Matern52()
    # 1D inputs, unit hyperparameters
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    D = np.array([[1., 2.], [0., 1.]])
    assert_allclose(k.kernel_f(x, y, params),
                    (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D))
    # 2D inputs, unit hyperparameters
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    D = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    assert_allclose(k.kernel_f(x, y, params),
                    (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D))
    # 2D inputs, non-unit length scales and covariance scale of 2
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    D = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    assert_allclose(k.kernel_f(x, y, params),
                    2.*(1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D))
    # 1D arrays should behave as single-dimension 2D inputs
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    D = np.array([[1., 2.], [0., 1.]])
    assert_allclose(k.kernel_f(x, y, params),
                    (1.+np.sqrt(5.)*D + 5./3.*D**2)*np.exp(-np.sqrt(5.)*D))
def test_matern_5_2_failures():
    "test scenarios where matern_5_2 should raise an exception"
    kernel = Matern52()
    inputs = np.array([[1.], [2.]])
    targets = np.array([[2.], [3.]])
    # too few hyperparameters for the input dimension
    with pytest.raises(AssertionError):
        kernel.kernel_f(inputs, targets, np.array([0.]))
    # hyperparameters must be a 1-D array
    with pytest.raises(AssertionError):
        kernel.kernel_f(inputs, targets, np.array([[0., 0.], [0., 0.]]))
def test_matern_5_2_deriv():
    "test computing the gradient of the matern 5/2 kernel"
    k = Matern52()
    dx = 1.e-6
    # 1-D inputs, zero hyperparameters; the gradient has one slice per
    # hyperparameter, the last slice being w.r.t. the log covariance scale
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    deriv = np.zeros((2, 2, 2))
    D = np.array([[1., 2.], [0., 1.]])
    # derivative w.r.t. the log covariance scale is the kernel itself
    deriv[-1] = (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    # derivative w.r.t. the log length scale
    deriv[0] = -0.5*D**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    # backward finite differences of kernel_f for comparison
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2-D inputs, zero hyperparameters; D1/D2 are the per-dimension distances
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    D = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    D1 = np.array([[1., 2.], [0., 1.]])
    D2 = np.array([[2., 1.], [1., 2.]])
    deriv = np.zeros((3, 2, 2))
    deriv[-1] = (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    deriv[0] = -0.5*D1**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    deriv[1] = -0.5*D2**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    deriv_fd = np.zeros((3, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0., 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx, 0.])))/dx
    deriv_fd[2] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., 0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2-D inputs with non-zero hyperparameters; factors of
    # exp(params[-1]) = 2 and exp(params[j]) = 2, 4 enter the expected gradient
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    D = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    D1 = np.array([[1., 2.], [0., 1.]])
    D2 = np.array([[2., 1.], [1., 2.]])
    deriv = np.zeros((3, 2, 2))
    deriv[-1] = 2.*(1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    deriv[0] = -0.5*2.*2.*D1**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    deriv[1] = -0.5*2.*4.*D2**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    deriv_fd = np.zeros((3, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0., 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., dx, 0.])))/dx
    deriv_fd[2] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0., 0., dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 1-D arrays without an explicit second dimension
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    deriv = np.zeros((2, 2, 2))
    D = np.array([[1., 2.], [0., 1.]])
    deriv[-1] = (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    deriv[0] = -0.5*D**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([dx, 0.])))/dx
    deriv_fd[1] = (k.kernel_f(x, y, params) -
                   k.kernel_f(x, y, params - np.array([0, dx])))/dx
    assert_allclose(k.kernel_deriv(x, y, params), deriv)
    assert_allclose(k.kernel_deriv(x, y, params), deriv_fd, rtol = 1.e-5)
def test_matern_5_2_deriv_failures():
    "test scenarios where matern_5_2_deriv should raise an exception"
    kernel = Matern52()
    x1 = np.array([[1.], [2.]])
    y1 = np.array([[2.], [3.]])
    ok_params = np.array([0., 0.])
    # each tuple is an invalid (x, y, params) combination
    bad_cases = [
        # wrong number of hyperparameters
        (x1, y1, np.array([0.])),
        # hyperparameters not a 1-D array
        (x1, y1, np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions between x and y
        (x1, np.array([[2., 4.], [3., 2.]]), ok_params),
        # y has too many dimensions
        (x1, np.array([[[2.], [4.]], [[3.], [2.]]]), ok_params),
        # mismatched dimensions the other way around
        (np.array([[2., 4.], [3., 2.]]), y1, ok_params),
        # x has too many dimensions
        (np.array([[[2.], [4.]], [[3.], [2.]]]), y1, ok_params),
    ]
    for x, y, params in bad_cases:
        with pytest.raises(AssertionError):
            kernel.kernel_deriv(x, y, params)
def test_matern_5_2_hessian():
    "test the function to compute the Matern 5/2 hessian"
    k = Matern52()
    dx = 1.e-6
    # 1-D inputs, zero hyperparameters; hessian is indexed by two
    # hyperparameter axes and is symmetric in them, the last index
    # corresponding to the log covariance scale
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    D = np.array([[1., 2.], [0., 1.]])
    hess = np.zeros((2, 2, 2, 2))
    hess[0, 0] = 5./3.*np.exp(-np.sqrt(5.)*D)*(5./4.*D**4-(1.+np.sqrt(5.)*D)*D**2/2.)
    hess[0, 1] = -0.5*D**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[1, 0] = -0.5*D**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[1, 1] = (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    # backward finite differences of kernel_deriv for comparison
    hess_fd = np.zeros((2, 2, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[0])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[1])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[1])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, rtol = 1.e-5)
    # 2-D inputs, zero hyperparameters; D1/D2 are per-dimension distances
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    D = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    D1 = np.array([[1., 2.], [0., 1.]])
    D2 = np.array([[2., 1.], [1., 2.]])
    hess = np.zeros((3, 3, 2, 2))
    hess[0, 0] = 5./3.*np.exp(-np.sqrt(5.)*D)*(5./4.*D1**4-(1.+np.sqrt(5.)*D)*D1**2/2.)
    hess[0, 1] = 5./3.*np.exp(-np.sqrt(5.)*D)*(5./4.*D1**2*D2**2)
    hess[1, 0] = 5./3.*np.exp(-np.sqrt(5.)*D)*(5./4.*D1**2*D2**2)
    hess[1, 1] = 5./3.*np.exp(-np.sqrt(5.)*D)*(5./4.*D2**4-(1.+np.sqrt(5.)*D)*D2**2/2.)
    hess[0, 2] = -0.5*D1**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[2, 0] = -0.5*D1**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[1, 2] = -0.5*D2**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[2, 1] = -0.5*D2**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[2, 2] = (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    hess_fd = np.zeros((3, 3, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[0])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[1])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[1])/dx
    hess_fd[0, 2] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[0])/dx
    hess_fd[2, 0] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[2])/dx
    hess_fd[1, 2] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[1])/dx
    hess_fd[2, 1] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[2])/dx
    hess_fd[2, 2] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[2])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, rtol = 1.e-5)
    # 2-D inputs with non-zero hyperparameters; factors of exp(params)
    # (2, 4, and the overall scale 2) enter the expected second derivatives
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    D = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    D1 = np.array([[1., 2.], [0., 1.]])
    D2 = np.array([[2., 1.], [1., 2.]])
    hess = np.zeros((3, 3, 2, 2))
    hess[0, 0] = (5./3.*2.*np.exp(-np.sqrt(5.)*D)*
                  (5./4.*2.*2.*D1**4 - (1.+np.sqrt(5.)*D)*2.*D1**2/2.))
    hess[0, 1] = 5./3.*2.*np.exp(-np.sqrt(5.)*D)*(5./4.*2.*4.*D1**2*D2**2)
    hess[1, 0] = 5./3.*2.*np.exp(-np.sqrt(5.)*D)*(5./4.*2.*4.*D1**2*D2**2)
    hess[1, 1] = (5./3.*2.*np.exp(-np.sqrt(5.)*D)*(5./4.*4.*4.*D2**4 -
                  (1. + np.sqrt(5.)*D)*4.*D2**2/2.))
    hess[0, 2] = -0.5*2.*2.*D1**2*5./3.*(1. + np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[2, 0] = -0.5*2.*2.*D1**2*5./3.*(1. + np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[1, 2] = -0.5*2.*4.*D2**2*5./3.*(1. + np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[2, 1] = -0.5*2.*4.*D2**2*5./3.*(1. + np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[2, 2] = 2.*(1. + np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    hess_fd = np.zeros((3, 3, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[0])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[1])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[1])/dx
    hess_fd[0, 2] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[0])/dx
    hess_fd[2, 0] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0., 0.]))[2])/dx
    hess_fd[1, 2] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[1])/dx
    hess_fd[2, 1] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., dx, 0.]))[2])/dx
    hess_fd[2, 2] = (k.kernel_deriv(x, y, params)[2] -
                     k.kernel_deriv(x, y, params-np.array([0., 0., dx]))[2])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, rtol = 1.e-5)
    # 1-D arrays without an explicit second dimension
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    D = np.array([[1., 2.], [0., 1.]])
    hess = np.zeros((2, 2, 2, 2))
    hess[0, 0] = 5./3.*np.exp(-np.sqrt(5.)*D)*(5./4.*D**4-(1.+np.sqrt(5.)*D)*D**2/2.)
    hess[0, 1] = -0.5*D**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[1, 0] = -0.5*D**2*5./3.*(1.+np.sqrt(5.)*D)*np.exp(-np.sqrt(5.)*D)
    hess[1, 1] = (1.+np.sqrt(5.)*D+5./3.*D**2)*np.exp(-np.sqrt(5.)*D)
    hess_fd = np.zeros((2, 2, 2, 2))
    hess_fd[0, 0] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[0])/dx
    hess_fd[0, 1] = (k.kernel_deriv(x, y, params)[0] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[0])/dx
    hess_fd[1, 0] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([dx, 0.]))[1])/dx
    hess_fd[1, 1] = (k.kernel_deriv(x, y, params)[1] -
                     k.kernel_deriv(x, y, params-np.array([0., dx]))[1])/dx
    assert_allclose(k.kernel_hessian(x, y, params), hess)
    assert_allclose(k.kernel_hessian(x, y, params), hess_fd, rtol = 1.e-5)
def test_matern_5_2_hessian_failures():
    "test situations where the Matern 5/2 hessian should fail"
    kernel = Matern52()
    x1 = np.array([[1.], [2.]])
    y1 = np.array([[2.], [3.]])
    ok_params = np.array([0., 0.])
    # each tuple is an invalid (x, y, params) combination
    bad_cases = [
        # wrong number of hyperparameters
        (x1, y1, np.array([0.])),
        # hyperparameters not a 1-D array
        (x1, y1, np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions between x and y
        (x1, np.array([[2., 4.], [3., 2.]]), ok_params),
        # y has too many dimensions
        (x1, np.array([[[2.], [4.]], [[3.], [2.]]]), ok_params),
        # mismatched dimensions the other way around
        (np.array([[2., 4.], [3., 2.]]), y1, ok_params),
        # x has too many dimensions
        (np.array([[[2.], [4.]], [[3.], [2.]]]), y1, ok_params),
    ]
    for x, y, params in bad_cases:
        with pytest.raises(AssertionError):
            kernel.kernel_hessian(x, y, params)
def test_matern_5_2_inputderiv():
    "test input derivative method of Matern 5/2 kernel"
    k = Matern52()
    dx = 1.e-6
    # 1-D inputs, zero hyperparameters
    x = np.array([[1.], [2.]])
    y = np.array([[2.], [3.]])
    params = np.array([0., 0.])
    deriv = np.zeros((1, 2, 2))
    r = np.array([[1., 2.], [0., 1.]])
    # dK/dx = -5/3*(1 + sqrt(5)*r)*exp(-sqrt(5)*r)*(x - y); r times the sign
    # matrix below reproduces the signed differences x - y
    deriv[0] = (-5./3.*r*(1.+np.sqrt(5.)*r)*np.exp(-np.sqrt(5.)*r)
                *np.array([[-1., -1.], [0., -1.]]))
    # central finite difference in x for comparison
    deriv_fd = np.zeros((1, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + dx, y, params) -
                   k.kernel_f(x - dx, y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd,
                    rtol = 1.e-5, atol = 1.e-8)
    # 2-D inputs, zero hyperparameters; the sign matrices are the per-dimension
    # signed differences x - y
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([0., 0., 0.])
    deriv = np.zeros((2, 2, 2))
    r = np.array([[np.sqrt(5.), np.sqrt(5.)], [1., np.sqrt(5.)]])
    deriv[0] = (-5./3.*(1.+np.sqrt(5.)*r)*np.exp(-np.sqrt(5.)*r)*
                np.array([[-1., -2.], [0., -1.]]))
    deriv[1] = (-5./3.*(1.+np.sqrt(5.)*r)*np.exp(-np.sqrt(5.)*r)*
                np.array([[-2., 1.], [-1., 2.]]))
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + np.array([[dx, 0.], [dx, 0.]]), y, params) -
                   k.kernel_f(x - np.array([[dx, 0.], [dx, 0.]]), y, params))/dx/2.
    deriv_fd[1] = (k.kernel_f(x + np.array([[0., dx], [0., dx]]), y, params) -
                   k.kernel_f(x - np.array([[0., dx], [0., dx]]), y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 2-D inputs with non-zero hyperparameters: dimension j picks up a
    # prefactor exp(params[-1])*exp(params[j]) = 2.*2. and 2.*4., matching
    # the factors used in test_matern_5_2_deriv. The previous expected values
    # wrongly used np.exp(2.)/np.exp(4.) and carried a leftover
    # squared-exponential term np.exp(-0.5*r**2), which is why the analytical
    # assertion had been commented out; both are fixed and the assertion is
    # restored below.
    x = np.array([[1., 2.], [2., 3.]])
    y = np.array([[2., 4.], [3., 1.]])
    params = np.array([np.log(2.), np.log(4.), np.log(2.)])
    deriv = np.zeros((2, 2, 2))
    r = np.array([[np.sqrt(1.*2.+4.*4.), np.sqrt(4.*2.+1.*4.)],
                  [np.sqrt(1.*4.), np.sqrt(1.*2.+4.*4.)]])
    deriv[0] = (-2.*2.*5./3.*(1.+np.sqrt(5.)*r)*
                np.exp(-np.sqrt(5.)*r)*np.array([[-1., -2.], [0., -1.]]))
    deriv[1] = (-2.*4.*5./3.*(1.+np.sqrt(5.)*r)*
                np.exp(-np.sqrt(5.)*r)*np.array([[-2., 1.], [-1., 2.]]))
    deriv_fd = np.zeros((2, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + np.array([[dx, 0.], [dx, 0.]]), y, params) -
                   k.kernel_f(x - np.array([[dx, 0.], [dx, 0.]]), y, params))/dx/2.
    deriv_fd[1] = (k.kernel_f(x + np.array([[0., dx], [0., dx]]), y, params) -
                   k.kernel_f(x - np.array([[0., dx], [0., dx]]), y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd, rtol = 1.e-5)
    # 1-D arrays without an explicit second dimension
    x = np.array([1., 2.])
    y = np.array([2., 3.])
    params = np.array([0., 0.])
    deriv = np.zeros((1, 2, 2))
    r = np.array([[1., 2.], [0., 1.]])
    deriv[0] = (-5./3.*r*(1.+np.sqrt(5.)*r)*
                np.exp(-np.sqrt(5.)*r)*np.array([[-1., -1.], [0., -1.]]))
    deriv_fd = np.zeros((1, 2, 2))
    deriv_fd[0] = (k.kernel_f(x + dx, y, params) -
                   k.kernel_f(x - dx, y, params))/dx/2.
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv)
    assert_allclose(k.kernel_inputderiv(x, y, params), deriv_fd,
                    rtol = 1.e-5, atol = 1.e-8)
def test_matern_5_2_inputderiv_failures():
    "test situations where input derivative should fail"
    kernel = Matern52()
    x1 = np.array([[1.], [2.]])
    y1 = np.array([[2.], [3.]])
    ok_params = np.array([0., 0.])
    # each tuple is an invalid (x, y, params) combination
    bad_cases = [
        # wrong number of hyperparameters
        (x1, y1, np.array([0.])),
        # hyperparameters not a 1-D array
        (x1, y1, np.array([[0., 0.], [0., 0.]])),
        # mismatched input dimensions between x and y
        (x1, np.array([[2., 4.], [3., 2.]]), ok_params),
        # y has too many dimensions
        (x1, np.array([[[2.], [4.]], [[3.], [2.]]]), ok_params),
        # mismatched dimensions the other way around
        (np.array([[2., 4.], [3., 2.]]), y1, ok_params),
        # x has too many dimensions
        (np.array([[[2.], [4.]], [[3.], [2.]]]), y1, ok_params),
    ]
    for x, y, params in bad_cases:
        with pytest.raises(AssertionError):
            kernel.kernel_inputderiv(x, y, params)
def test_Kernel_str():
    "test string method of generic Kernel class"
    assert str(Kernel()) == "Stationary Kernel"
def test_SquaredExponential_str():
    "test string method of SquaredExponential class"
    assert str(SquaredExponential()) == "Squared Exponential Kernel"
def test_Matern52_str():
    "test string method of Matern52 class"
    assert str(Matern52()) == "Matern 5/2 Kernel"
| 35.371554
| 94
| 0.497423
| 9,925
| 56,453
| 2.747305
| 0.010882
| 0.136575
| 0.09506
| 0.077016
| 0.959915
| 0.946419
| 0.940441
| 0.927678
| 0.920673
| 0.913008
| 0
| 0.080369
| 0.232547
| 56,453
| 1,596
| 95
| 35.371554
| 0.54899
| 0.032275
| 0
| 0.853219
| 0
| 0
| 0.029194
| 0.00096
| 0
| 0
| 0
| 0
| 0.151931
| 1
| 0.028326
| false
| 0
| 0.003433
| 0
| 0.03176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbf59831562eb6fe2ed7289568bda2e52a21637c
| 129
|
py
|
Python
|
providers/__init__.py
|
prezesp/scoop-viewer
|
115f413979ba2e4e766e334f0240082a9343e314
|
[
"MIT"
] | 86
|
2018-07-17T14:21:05.000Z
|
2022-03-29T03:00:40.000Z
|
providers/__init__.py
|
prezesp/scoop-viewer
|
115f413979ba2e4e766e334f0240082a9343e314
|
[
"MIT"
] | 16
|
2018-04-24T22:45:24.000Z
|
2021-12-15T08:37:38.000Z
|
providers/__init__.py
|
prezesp/scoop-viewer
|
115f413979ba2e4e766e334f0240082a9343e314
|
[
"MIT"
] | 5
|
2018-03-28T18:24:52.000Z
|
2022-01-08T11:28:31.000Z
|
from providers.scoop_provider import ScoopProvider, ScoopNotInstalled
from providers.scoop_mock_provider import ScoopMockProvider
| 64.5
| 69
| 0.914729
| 14
| 129
| 8.214286
| 0.642857
| 0.226087
| 0.313043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062016
| 129
| 2
| 70
| 64.5
| 0.950413
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e06875fec176f34b26d962fd95c9459bc476122f
| 161
|
py
|
Python
|
ui/__init__.py
|
ChiaN-Yang/QMeas
|
f7003b5c66530beefc4eadd3740acc9023a24b60
|
[
"MIT"
] | null | null | null |
ui/__init__.py
|
ChiaN-Yang/QMeas
|
f7003b5c66530beefc4eadd3740acc9023a24b60
|
[
"MIT"
] | null | null | null |
ui/__init__.py
|
ChiaN-Yang/QMeas
|
f7003b5c66530beefc4eadd3740acc9023a24b60
|
[
"MIT"
] | null | null | null |
from .main_window_qt import Ui_MainWindow as Qt_Window
from .control_option import Ui_Dialog as Control_Window
from .read_option import Ui_Dialog as Read_Window
| 40.25
| 55
| 0.869565
| 28
| 161
| 4.642857
| 0.428571
| 0.184615
| 0.215385
| 0.307692
| 0.338462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111801
| 161
| 3
| 56
| 53.666667
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e06b5d328dc1d24fe5d9138e0bda655d7dbc79af
| 245,823
|
py
|
Python
|
tests/test_generate_switch_config_aruba_csm_1_2.py
|
Cray-HPE/canu
|
3a92ce1e9b63f35aa30b9135afaa734e61909407
|
[
"MIT"
] | 6
|
2021-09-16T22:02:48.000Z
|
2022-02-04T18:08:57.000Z
|
tests/test_generate_switch_config_aruba_csm_1_2.py
|
Cray-HPE/canu
|
3a92ce1e9b63f35aa30b9135afaa734e61909407
|
[
"MIT"
] | 57
|
2021-09-17T17:15:59.000Z
|
2022-03-31T20:56:21.000Z
|
tests/test_generate_switch_config_aruba_csm_1_2.py
|
Cray-HPE/canu
|
3a92ce1e9b63f35aa30b9135afaa734e61909407
|
[
"MIT"
] | 4
|
2022-01-06T17:09:02.000Z
|
2022-02-04T18:09:33.000Z
|
# MIT License
#
# (C) Copyright [2022] Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""Test CANU generate switch config commands."""
import json
from os import path
from pathlib import Path
from click import testing
import requests
import responses
from canu.cli import cli
test_file_directory = Path(__file__).resolve().parent
test_file_name = "Full_Architecture_Golden_Config_1.1.5.xlsx"
test_file = path.join(test_file_directory, "data", test_file_name)
custom_file_name = "aruba_custom.yaml"
custom_file = path.join(test_file_directory, "data", custom_file_name)
architecture = "full"
tabs = "SWITCH_TO_SWITCH,NON_COMPUTE_NODES,HARDWARE_MANAGEMENT,COMPUTE_NODES"
corners = "J14,T44,J14,T53,J14,T34,J14,T27"
sls_file = "sls_file.json"
csm = "1.2"
switch_name = "sw-spine-001"
cache_minutes = 0
sls_address = "api-gw-service-nmn.local"
test_file_name_tds = "TDS_Architecture_Golden_Config_1.1.5.xlsx"
test_file_tds = path.join(test_file_directory, "data", test_file_name_tds)
architecture_tds = "TDS"
tabs_tds = "SWITCH_TO_SWITCH,NON_COMPUTE_NODES,HARDWARE_MANAGEMENT,COMPUTE_NODES"
corners_tds = "J14,T30,J14,T53,J14,T32,J14,T27"
canu_version_file = path.join(test_file_directory.resolve().parent, "canu", ".version")
with open(canu_version_file, "r") as file:
canu_version = file.readline()
canu_version = canu_version.strip()
banner_motd = (
"banner exec !\n"
"###############################################################################\n"
f"# CSM version: {csm}\n"
f"# CANU version: {canu_version}\n"
"###############################################################################\n"
"!\n"
)
runner = testing.CliRunner()
def test_switch_config_spine_primary():
"""Test that the `canu generate switch config` command runs and returns valid primary spine config."""
with runner.isolated_filesystem():
with open(sls_file, "w") as f:
json.dump(sls_input, f)
result = runner.invoke(
cli,
[
"--cache",
cache_minutes,
"generate",
"switch",
"config",
"--csm",
csm,
"--architecture",
architecture,
"--shcd",
test_file,
"--tabs",
tabs,
"--corners",
corners,
"--sls-file",
sls_file,
"--name",
switch_name,
],
)
assert result.exit_code == 0
assert "hostname sw-spine-001\n"
assert banner_motd in str(result.output)
print(result.output)
assert (
"no ip icmp redirect\n"
+ "vrf Customer\n"
+ "vrf keepalive\n"
+ "ntp server 192.168.4.4\n"
+ "ntp server 192.168.4.5\n"
+ "ntp server 192.168.4.6\n"
+ "ntp enable\n"
) in str(result.output)
print(result.output)
assert (
"ssh server vrf Customer\n"
+ "ssh server vrf default\n"
+ "ssh server vrf keepalive\n"
+ "ssh server vrf mgmt\n"
+ "access-list ip mgmt\n"
+ " 10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
+ " 20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
+ " 30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
+ " 40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
+ " 50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
+ " 60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
+ " 70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
+ " 80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
+ " 90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
+ " 100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
+ " 110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
+ " 120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
+ " 130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
+ " 140 deny tcp any any eq ssh\n"
+ " 150 deny tcp any any eq https\n"
+ " 160 deny udp any any eq snmp\n"
+ " 170 deny udp any any eq snmp-trap\n"
+ " 180 comment ALLOW ANYTHING ELSE\n"
+ " 190 permit any any any\n"
+ "access-list ip nmn-hmn\n"
+ " 10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
+ " 20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
+ " 30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
+ " 40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
+ " 50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
+ " 60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
+ " 70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
+ " 80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
+ " 90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
+ " 100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
+ " 110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
+ " 120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
+ " 130 permit any any any\n"
+ "access-list ip cmn-can\n"
+ " 10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
+ " 20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
+ " 30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
+ " 40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
+ " 50 permit any any any\n"
+ "apply access-list ip mgmt control-plane vrf default\n"
+ "apply access-list ip mgmt control-plane vrf Customer\n"
+ "\n"
+ "vlan 1\n"
+ "vlan 2\n"
+ " name NMN\n"
+ " apply access-list ip nmn-hmn in\n"
+ " apply access-list ip nmn-hmn out\n"
+ "vlan 4\n"
+ " name HMN\n"
+ " apply access-list ip nmn-hmn in\n"
+ " apply access-list ip nmn-hmn out\n"
+ "vlan 6\n"
+ " name CMN\n"
+ " apply access-list ip cmn-can in\n"
+ " apply access-list ip cmn-can out\n"
+ "vlan 7\n"
+ " name CAN\n"
+ " apply access-list ip cmn-can in\n"
+ " apply access-list ip cmn-can out\n"
+ "vlan 10\n"
+ " name SUN\n"
+ "spanning-tree\n"
+ "spanning-tree forward-delay 4\n"
+ "spanning-tree priority 0\n"
+ "spanning-tree config-name MST0\n"
+ "spanning-tree config-revision 1\n"
+ "interface mgmt\n"
+ " shutdown\n"
+ " ip dhcp\n"
) in str(result.output)
sw_spine_to_leaf = (
"interface lag 101 multi-chassis\n"
+ " no shutdown\n"
+ " description spine_to_leaf_lag\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " spanning-tree root-guard\n"
+ "\n"
+ "interface 1/1/1\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-leaf-001:53<==sw-spine-001\n"
+ " lag 101\n"
+ "\n"
+ "interface 1/1/2\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-leaf-002:53<==sw-spine-001\n"
+ " lag 101\n"
+ "\n"
+ "interface lag 103 multi-chassis\n"
+ " no shutdown\n"
+ " description spine_to_leaf_lag\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " spanning-tree root-guard\n"
+ "\n"
+ "interface 1/1/3\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-leaf-003:53<==sw-spine-001\n"
+ " lag 103\n"
+ "\n"
+ "interface 1/1/4\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-leaf-004:53<==sw-spine-001\n"
+ " lag 103\n"
)
assert sw_spine_to_leaf in str(result.output)
spine_to_cdu = (
"interface lag 201 multi-chassis\n"
+ " no shutdown\n"
+ " description sw-cdu-001:50<==sw-spine-001\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6\n"
+ " lacp mode active\n"
+ " spanning-tree root-guard\n"
+ "\n"
+ "interface 1/1/5\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-cdu-001:50<==sw-spine-001\n"
+ " lag 201\n"
+ "\n"
+ "interface 1/1/6\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-cdu-002:50<==sw-spine-001\n"
+ " lag 201\n"
)
assert spine_to_cdu in str(result.output)
print(result.output)
assert (
"interface lag 256\n"
+ " no shutdown\n"
+ " description ISL link\n"
+ " no routing\n"
+ " vlan trunk native 1 tag\n"
+ " vlan trunk allowed all\n"
+ " lacp mode active\n"
+ "interface 1/1/30\n"
+ " no shutdown\n"
+ " vrf attach keepalive\n"
+ " description VSX keepalive\n"
+ " ip address 192.168.255.0/31\n"
+ "interface 1/1/31\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description vsx isl\n"
+ " lag 256\n"
+ "interface 1/1/32\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description vsx isl\n"
+ " lag 256\n"
+ "vsx\n"
+ " system-mac 02:00:00:00:01:00\n"
+ " inter-switch-link lag 256\n"
+ " role primary\n"
+ " keepalive peer 192.168.255.1 source 192.168.255.0 vrf keepalive\n"
+ " linkup-delay-timer 600\n"
+ " vsx-sync vsx-global\n"
+ "\n"
+ "interface loopback 0\n"
+ " ip address 10.2.0.2/32\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ "interface vlan 1\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.1.2/16\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.1.1\n"
+ " ip helper-address 10.92.100.222\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ " ip ospf passive\n"
+ "interface vlan 2\n"
+ " description NMN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.3.2/17\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.3.1\n"
+ " ip helper-address 10.92.100.222\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ "interface vlan 4\n"
+ " description HMN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.0.2/17\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.0.1\n"
+ " ip helper-address 10.94.100.222\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ " ip ospf passive\n"
+ "interface vlan 6\n"
+ " vrf attach Customer\n"
+ " description CMN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.12.2/24\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.12.1\n"
+ " ip ospf 2 area 0.0.0.0\n"
+ "interface vlan 7\n"
+ " vrf attach Customer\n"
+ " description CAN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.11.2/24\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.11.1\n"
+ " ip ospf 2 area 0.0.0.0\n"
+ "ip dns server-address 10.92.100.225\n"
) in str(result.output)
print(result.output)
assert (
"ip prefix-list pl-cmn seq 10 permit 192.168.12.0/24 ge 24\n"
+ "ip prefix-list pl-can seq 20 permit 192.168.11.0/24 ge 24\n"
+ "ip prefix-list pl-hmn seq 30 permit 10.94.100.0/24 ge 24\n"
+ "ip prefix-list pl-nmn seq 40 permit 10.92.100.0/24 ge 24\n"
+ "ip prefix-list tftp seq 10 permit 10.92.100.60/32 ge 32 le 32\n"
+ "ip prefix-list tftp seq 20 permit 10.94.100.60/32 ge 32 le 32\n"
) in str(result.output)
print(result.output)
assert (
"route-map ncn-w001 permit seq 10\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.4\n"
+ " set local-preference 1000\n"
+ "route-map ncn-w001 permit seq 20\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.5\n"
+ " set local-preference 1100\n"
+ "route-map ncn-w001 permit seq 30\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.6\n"
+ " set local-preference 1200\n"
+ "route-map ncn-w001 permit seq 40\n"
+ " match ip address prefix-list pl-hmn\n"
+ " set ip next-hop 192.168.0.4\n"
+ "route-map ncn-w001 permit seq 50\n"
+ " match ip address prefix-list pl-nmn\n"
+ " set ip next-hop 192.168.4.4\n"
+ "\n"
+ "route-map ncn-w001-Customer permit seq 10\n"
+ " match ip address prefix-list pl-can\n"
+ " set ip next-hop 192.168.11.4\n"
+ "route-map ncn-w001-Customer permit seq 20\n"
+ " match ip address prefix-list pl-cmn\n"
+ "\n"
+ "\n"
+ "route-map ncn-w002 permit seq 10\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.4\n"
+ " set local-preference 1000\n"
+ "route-map ncn-w002 permit seq 20\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.5\n"
+ " set local-preference 1100\n"
+ "route-map ncn-w002 permit seq 30\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.6\n"
+ " set local-preference 1200\n"
+ "route-map ncn-w002 permit seq 40\n"
+ " match ip address prefix-list pl-hmn\n"
+ " set ip next-hop 192.168.0.5\n"
+ "route-map ncn-w002 permit seq 50\n"
+ " match ip address prefix-list pl-nmn\n"
+ " set ip next-hop 192.168.4.5\n"
+ "\n"
+ "route-map ncn-w002-Customer permit seq 10\n"
+ " match ip address prefix-list pl-can\n"
+ " set ip next-hop 192.168.11.5\n"
+ "route-map ncn-w002-Customer permit seq 20\n"
+ " match ip address prefix-list pl-cmn\n"
+ "\n"
+ "\n"
+ "route-map ncn-w003 permit seq 10\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.4\n"
+ " set local-preference 1000\n"
+ "route-map ncn-w003 permit seq 20\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.5\n"
+ " set local-preference 1100\n"
+ "route-map ncn-w003 permit seq 30\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.6\n"
+ " set local-preference 1200\n"
+ "route-map ncn-w003 permit seq 40\n"
+ " match ip address prefix-list pl-hmn\n"
+ " set ip next-hop 192.168.0.6\n"
+ "route-map ncn-w003 permit seq 50\n"
+ " match ip address prefix-list pl-nmn\n"
+ " set ip next-hop 192.168.4.6\n"
+ "\n"
+ "route-map ncn-w003-Customer permit seq 10\n"
+ " match ip address prefix-list pl-can\n"
+ " set ip next-hop 192.168.11.6\n"
+ "route-map ncn-w003-Customer permit seq 20\n"
+ " match ip address prefix-list pl-cmn\n"
) in str(result.output)
print(result.output)
assert (
"router ospf 2 vrf Customer\n"
+ " router-id 10.2.0.2\n"
+ " default-information originate\n"
+ " area 0.0.0.0\n"
+ "router ospf 1\n"
+ " router-id 10.2.0.2\n"
+ " redistribute bgp\n"
+ " area 0.0.0.0\n"
+ "\n"
+ "router bgp 65533\n"
+ " bgp router-id 10.2.0.2\n"
+ " maximum-paths 8\n"
+ " timers bgp 1 3\n"
+ " distance bgp 20 70\n"
+ " neighbor 192.168.3.3 remote-as 65533\n"
+ " neighbor 192.168.4.4 remote-as 65531\n"
+ " neighbor 192.168.4.4 passive\n"
+ " neighbor 192.168.4.5 remote-as 65531\n"
+ " neighbor 192.168.4.5 passive\n"
+ " neighbor 192.168.4.6 remote-as 65531\n"
+ " neighbor 192.168.4.6 passive\n"
+ " address-family ipv4 unicast\n"
+ " neighbor 192.168.3.3 activate\n"
+ " neighbor 192.168.4.4 activate\n"
+ " neighbor 192.168.4.4 route-map ncn-w001 in\n"
+ " neighbor 192.168.4.5 activate\n"
+ " neighbor 192.168.4.5 route-map ncn-w002 in\n"
+ " neighbor 192.168.4.6 activate\n"
+ " neighbor 192.168.4.6 route-map ncn-w003 in\n"
+ " exit-address-family\n"
+ " vrf Customer\n"
+ " bgp router-id 10.2.0.2\n"
+ " maximum-paths 8\n"
+ " timers bgp 1 3\n"
+ " distance bgp 20 70\n"
+ " neighbor 192.168.12.3 remote-as 65533\n"
+ " neighbor 192.168.12.4 remote-as 65532\n"
+ " neighbor 192.168.12.4 passive\n"
+ " neighbor 192.168.12.5 remote-as 65532\n"
+ " neighbor 192.168.12.5 passive\n"
+ " neighbor 192.168.12.6 remote-as 65532\n"
+ " neighbor 192.168.12.6 passive\n"
+ " address-family ipv4 unicast\n"
+ " neighbor 192.168.12.3 activate\n"
+ " neighbor 192.168.12.4 activate\n"
+ " neighbor 192.168.12.4 route-map ncn-w001-Customer in\n"
+ " neighbor 192.168.12.5 activate\n"
+ " neighbor 192.168.12.5 route-map ncn-w002-Customer in\n"
+ " neighbor 192.168.12.6 activate\n"
+ " neighbor 192.168.12.6 route-map ncn-w003-Customer in\n"
+ " exit-address-family\n"
+ "https-server vrf Customer\n"
+ "https-server vrf default\n"
+ "https-server vrf mgmt\n"
) in str(result.output)
def test_switch_config_spine_primary_custom():
    """Test that the `canu generate switch config custom` command runs and returns valid primary spine config.

    Invokes the CLI with ``--custom-config`` for the primary spine switch
    (``switch_name``, sw-spine-001) and asserts that each expected section
    of the rendered configuration appears verbatim in the command output:
    the '#'-commented custom-config echo, base settings (VRFs, SSH/NTP,
    ACLs, VLANs, spanning-tree), LAG definitions, route-maps, interface and
    VSX configuration, and the OSPF/BGP routing stanzas.
    """
    with runner.isolated_filesystem():
        # Write the SLS input data to a file so the CLI can consume it.
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                switch_name,
                "--custom-config",
                custom_file,
            ],
        )
        assert result.exit_code == 0
        print(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        # The config supplied via --custom-config is echoed back as
        # '#'-prefixed comment lines in the generated output.
        assert (
            "# ip route 0.0.0.0/0 10.103.15.185\n"
            + "# interface 1/1/36\n"
            + "# no shutdown\n"
            + "# ip address 10.103.15.186/30\n"
            + "# exit\n"
            + "# interface 1/1/1\n"
            + "# ip address 10.103.15.10/30\n"
            + "# exit\n"
            + "# system interface-group 3 speed 10g\n"
            + "# interface 1/1/20\n"
            + "# no shutdown\n"
            + "# mtu 9198\n"
            + "# description ion-node<==sw-spine-001\n"
            + "# no routing\n"
            + "# vlan access 7\n"
            + "# spanning-tree bpdu-guard\n"
            + "# spanning-tree port-type admin-edge\n"
        ) in str(result.output)
        print(result.output)
        # Base switch settings: hostname, VRFs, SSH/NTP servers, the mgmt
        # and nmn-hmn/cmn-can ACLs, VLAN definitions, and spanning-tree.
        assert (
            "hostname sw-spine-001\n"
            + "vrf Customer\n"
            + "vrf keepalive\n"
            + "ssh server vrf Customer\n"
            + "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
            + "access-list ip mgmt\n"
            + " 10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + " 20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + " 30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + " 40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + " 50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + " 60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + " 70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + " 80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + " 90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + " 100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + " 110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + " 120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + " 130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + " 140 deny tcp any any eq ssh\n"
            + " 150 deny tcp any any eq https\n"
            + " 160 deny udp any any eq snmp\n"
            + " 170 deny udp any any eq snmp-trap\n"
            + " 180 comment ALLOW ANYTHING ELSE\n"
            + " 190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + " 10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + " 20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + " 30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + " 40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + " 50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + " 60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + " 70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + " 80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + " 90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + " 100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + " 110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + " 120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + " 130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + " 10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + " 20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + " 30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + " 40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + " 50 permit any any any\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + " name NMN\n"
            + " apply access-list ip nmn-hmn in\n"
            + " apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + " name HMN\n"
            + " apply access-list ip nmn-hmn in\n"
            + " apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + " name CMN\n"
            + " apply access-list ip cmn-can in\n"
            + " apply access-list ip cmn-can out\n"
            + "vlan 7\n"
            + " name CAN\n"
            + " apply access-list ip cmn-can in\n"
            + " apply access-list ip cmn-can out\n"
            + "vlan 10\n"
            + " name SUN\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree priority 0\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + " shutdown\n"
            + " ip dhcp\n"
        ) in str(result.output)
        # Multi-chassis LAG definitions for the spine-to-leaf and
        # spine-to-CDU links, plus the inter-switch link (lag 256).
        sw_spine_to_leaf = (
            "interface lag 101 multi-chassis\n"
            + " no shutdown\n"
            + " description spine_to_leaf_lag\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6-7\n"
            + " lacp mode active\n"
            + " spanning-tree root-guard\n"
            + "interface lag 103 multi-chassis\n"
            + " no shutdown\n"
            + " description spine_to_leaf_lag\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6-7\n"
            + " lacp mode active\n"
            + " spanning-tree root-guard\n"
            + "interface lag 201 multi-chassis\n"
            + " no shutdown\n"
            + " description sw-cdu-001:50<==sw-spine-001\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6\n"
            + " lacp mode active\n"
            + " spanning-tree root-guard\n"
            + "interface lag 256\n"
            + " no shutdown\n"
            + " description ISL link\n"
            + " no routing\n"
            + " vlan trunk native 1 tag\n"
            + " vlan trunk allowed all\n"
            + " lacp mode active\n"
        )
        assert sw_spine_to_leaf in str(result.output)
        # Route-maps steering traffic toward the worker NCNs
        # (ncn-w001..003), with per-VRF (-Customer) variants.
        output = (
            "no ip icmp redirect\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "route-map ncn-w001 permit seq 10\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.4\n"
            + " set local-preference 1000\n"
            + "route-map ncn-w001 permit seq 20\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.5\n"
            + " set local-preference 1100\n"
            + "route-map ncn-w001 permit seq 30\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.6\n"
            + " set local-preference 1200\n"
            + "route-map ncn-w001 permit seq 40\n"
            + " match ip address prefix-list pl-hmn\n"
            + " set ip next-hop 192.168.0.4\n"
            + "route-map ncn-w001 permit seq 50\n"
            + " match ip address prefix-list pl-nmn\n"
            + " set ip next-hop 192.168.4.4\n"
            + "route-map ncn-w001-Customer permit seq 10\n"
            + " match ip address prefix-list pl-can\n"
            + " set ip next-hop 192.168.11.4\n"
            + "route-map ncn-w001-Customer permit seq 20\n"
            + " match ip address prefix-list pl-cmn\n"
            + "route-map ncn-w002 permit seq 10\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.4\n"
            + " set local-preference 1000\n"
            + "route-map ncn-w002 permit seq 20\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.5\n"
            + " set local-preference 1100\n"
            + "route-map ncn-w002 permit seq 30\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.6\n"
            + " set local-preference 1200\n"
            + "route-map ncn-w002 permit seq 40\n"
            + " match ip address prefix-list pl-hmn\n"
            + " set ip next-hop 192.168.0.5\n"
            + "route-map ncn-w002 permit seq 50\n"
            + " match ip address prefix-list pl-nmn\n"
            + " set ip next-hop 192.168.4.5\n"
            + "route-map ncn-w002-Customer permit seq 10\n"
            + " match ip address prefix-list pl-can\n"
            + " set ip next-hop 192.168.11.5\n"
            + "route-map ncn-w002-Customer permit seq 20\n"
            + " match ip address prefix-list pl-cmn\n"
            + "route-map ncn-w003 permit seq 10\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.4\n"
            + " set local-preference 1000\n"
            + "route-map ncn-w003 permit seq 20\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.5\n"
            + " set local-preference 1100\n"
            + "route-map ncn-w003 permit seq 30\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.6\n"
            + " set local-preference 1200\n"
            + "route-map ncn-w003 permit seq 40\n"
            + " match ip address prefix-list pl-hmn\n"
            + " set ip next-hop 192.168.0.6\n"
            + "route-map ncn-w003 permit seq 50\n"
            + " match ip address prefix-list pl-nmn\n"
            + " set ip next-hop 192.168.4.6\n"
            + "route-map ncn-w003-Customer permit seq 10\n"
            + " match ip address prefix-list pl-can\n"
            + " set ip next-hop 192.168.11.6\n"
            + "route-map ncn-w003-Customer permit seq 20\n"
            + " match ip address prefix-list pl-cmn\n"
        )
        assert output in str(result.output)
        print(result.output)
        # Loopback/VLAN interfaces, VSX primary-role configuration, and the
        # physical interfaces (including the custom-config overridden ones).
        assert (
            "system interface-group 3 speed 10g\n"
            + "interface loopback 0\n"
            + " ip address 10.2.0.2/32\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.1.2/16\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.1.1\n"
            + " ip helper-address 10.92.100.222\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + " ip ospf passive\n"
            + "interface vlan 2\n"
            + " description NMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.3.2/17\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.3.1\n"
            + " ip helper-address 10.92.100.222\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + " description HMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.0.2/17\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.0.1\n"
            + " ip helper-address 10.94.100.222\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + " ip ospf passive\n"
            + "interface vlan 6\n"
            + " vrf attach Customer\n"
            + " description CMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.12.2/24\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.12.1\n"
            + " ip ospf 2 area 0.0.0.0\n"
            + "interface vlan 7\n"
            + " vrf attach Customer\n"
            + " description CAN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.11.2/24\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.11.1\n"
            + " ip ospf 2 area 0.0.0.0\n"
            + "vsx\n"
            + " system-mac 02:00:00:00:01:00\n"
            + " inter-switch-link lag 256\n"
            + " role primary\n"
            + " keepalive peer 192.168.255.1 source 192.168.255.0 vrf keepalive\n"
            + " linkup-delay-timer 600\n"
            + " vsx-sync vsx-global\n"
            + "interface 1/1/2\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-002:53<==sw-spine-001\n"
            + " lag 101\n"
            + "interface 1/1/3\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-003:53<==sw-spine-001\n"
            + " lag 103\n"
            + "interface 1/1/4\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-004:53<==sw-spine-001\n"
            + " lag 103\n"
            + "interface 1/1/5\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-cdu-001:50<==sw-spine-001\n"
            + " lag 201\n"
            + "interface 1/1/6\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-cdu-002:50<==sw-spine-001\n"
            + " lag 201\n"
            + "interface 1/1/30\n"
            + " no shutdown\n"
            + " vrf attach keepalive\n"
            + " description VSX keepalive\n"
            + " ip address 192.168.255.0/31\n"
            + "interface 1/1/31\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description vsx isl\n"
            + " lag 256\n"
            + "interface 1/1/32\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description vsx isl\n"
            + " lag 256\n"
            + "interface 1/1/36\n"
            + " no shutdown\n"
            + " ip address 10.103.15.186/30\n"
            + " exit\n"
            + "interface 1/1/1\n"
            + " no shutdown\n"
            + " ip address 10.103.15.10/30\n"
            + " exit\n"
            + "interface 1/1/20\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ion-node<==sw-spine-001\n"
            + " no routing\n"
            + " vlan access 7\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
        ) in str(result.output)
        print(result.output)
        # DNS, prefix-lists, the default route from the custom config, and
        # the OSPF/BGP routing stanzas (default VRF and vrf Customer).
        assert (
            "ip dns server-address 10.92.100.225\n"
            + "ip prefix-list pl-cmn seq 10 permit 192.168.12.0/24 ge 24\n"
            + "ip prefix-list pl-can seq 20 permit 192.168.11.0/24 ge 24\n"
            + "ip prefix-list pl-hmn seq 30 permit 10.94.100.0/24 ge 24\n"
            + "ip prefix-list pl-nmn seq 40 permit 10.92.100.0/24 ge 24\n"
            + "ip prefix-list tftp seq 10 permit 10.92.100.60/32 ge 32 le 32\n"
            + "ip prefix-list tftp seq 20 permit 10.94.100.60/32 ge 32 le 32\n"
            + "ip route 0.0.0.0/0 10.103.15.185\n"
            + "router ospf 2 vrf Customer\n"
            + " router-id 10.2.0.2\n"
            + " default-information originate\n"
            + " area 0.0.0.0\n"
            + "router ospf 1\n"
            + " router-id 10.2.0.2\n"
            + " redistribute bgp\n"
            + " area 0.0.0.0\n"
            + "router bgp 65533\n"
            + " bgp router-id 10.2.0.2\n"
            + " maximum-paths 8\n"
            + " timers bgp 1 3\n"
            + " distance bgp 20 70\n"
            + " neighbor 192.168.3.3 remote-as 65533\n"
            + " neighbor 192.168.4.4 remote-as 65531\n"
            + " neighbor 192.168.4.4 passive\n"
            + " neighbor 192.168.4.5 remote-as 65531\n"
            + " neighbor 192.168.4.5 passive\n"
            + " neighbor 192.168.4.6 remote-as 65531\n"
            + " neighbor 192.168.4.6 passive\n"
            + " address-family ipv4 unicast\n"
            + " neighbor 192.168.3.3 activate\n"
            + " neighbor 192.168.4.4 activate\n"
            + " neighbor 192.168.4.4 route-map ncn-w001 in\n"
            + " neighbor 192.168.4.5 activate\n"
            + " neighbor 192.168.4.5 route-map ncn-w002 in\n"
            + " neighbor 192.168.4.6 activate\n"
            + " neighbor 192.168.4.6 route-map ncn-w003 in\n"
            + " exit-address-family\n"
            + " vrf Customer\n"
            + " bgp router-id 10.2.0.2\n"
            + " maximum-paths 8\n"
            + " timers bgp 1 3\n"
            + " distance bgp 20 70\n"
            + " neighbor 192.168.12.3 remote-as 65533\n"
            + " neighbor 192.168.12.4 remote-as 65532\n"
            + " neighbor 192.168.12.4 passive\n"
            + " neighbor 192.168.12.5 remote-as 65532\n"
            + " neighbor 192.168.12.5 passive\n"
            + " neighbor 192.168.12.6 remote-as 65532\n"
            + " neighbor 192.168.12.6 passive\n"
            + " address-family ipv4 unicast\n"
            + " neighbor 192.168.12.3 activate\n"
            + " neighbor 192.168.12.4 activate\n"
            + " neighbor 192.168.12.4 route-map ncn-w001-Customer in\n"
            + " neighbor 192.168.12.5 activate\n"
            + " neighbor 192.168.12.5 route-map ncn-w002-Customer in\n"
            + " neighbor 192.168.12.6 activate\n"
            + " neighbor 192.168.12.6 route-map ncn-w003-Customer in\n"
            + " exit-address-family\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_spine_secondary_custom():
    """Test that the `canu generate switch config custom` command runs and returns valid secondary spine config.

    Invokes the CLI with ``--custom-config`` for the secondary spine switch
    (sw-spine-002) and asserts that each expected section of the rendered
    configuration appears verbatim in the command output: the '#'-commented
    custom-config echo, base settings (VRFs, SSH/NTP, ACLs, VLANs,
    spanning-tree), LAG definitions, route-maps, interface and VSX
    configuration, and the OSPF/BGP routing stanzas.
    """
    with runner.isolated_filesystem():
        # Write the SLS input data to a file so the CLI can consume it.
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        spine_secondary = "sw-spine-002"
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                spine_secondary,
                "--custom-config",
                custom_file,
            ],
        )
        assert result.exit_code == 0
        assert banner_motd in str(result.output)
        print(result.output)
        # The config supplied via --custom-config is echoed back as
        # '#'-prefixed comment lines in the generated output.
        assert (
            "# ip route 0.0.0.0/0 10.103.15.189\n"
            + "# interface 1/1/36\n"
            + "# no shutdown\n"
            + "# ip address 10.103.15.190/30\n"
            + "# exit\n"
            + "# system interface-group 3 speed 10g\n"
            + "# interface 1/1/20\n"
            + "# no shutdown\n"
            + "# mtu 9198\n"
            + "# description ion-node<==sw-spine-002\n"
            + "# no routing\n"
            + "# vlan access 7\n"
            + "# spanning-tree bpdu-guard\n"
            + "# spanning-tree port-type admin-edge\n"
        ) in str(result.output)
        print(result.output)
        # Base switch settings: hostname, VRFs, SSH/NTP servers, the mgmt
        # and nmn-hmn/cmn-can ACLs, VLAN definitions, and spanning-tree.
        assert (
            "hostname sw-spine-002\n"
            + "vrf Customer\n"
            + "vrf keepalive\n"
            + "ssh server vrf Customer\n"
            + "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
            + "access-list ip mgmt\n"
            + " 10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + " 20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + " 30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + " 40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + " 50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + " 60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + " 70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + " 80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + " 90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + " 100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + " 110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + " 120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + " 130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + " 140 deny tcp any any eq ssh\n"
            + " 150 deny tcp any any eq https\n"
            + " 160 deny udp any any eq snmp\n"
            + " 170 deny udp any any eq snmp-trap\n"
            + " 180 comment ALLOW ANYTHING ELSE\n"
            + " 190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + " 10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + " 20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + " 30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + " 40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + " 50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + " 60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + " 70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + " 80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + " 90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + " 100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + " 110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + " 120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + " 130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + " 10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + " 20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + " 30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + " 40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + " 50 permit any any any\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + " name NMN\n"
            + " apply access-list ip nmn-hmn in\n"
            + " apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + " name HMN\n"
            + " apply access-list ip nmn-hmn in\n"
            + " apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + " name CMN\n"
            + " apply access-list ip cmn-can in\n"
            + " apply access-list ip cmn-can out\n"
            + "vlan 7\n"
            + " name CAN\n"
            + " apply access-list ip cmn-can in\n"
            + " apply access-list ip cmn-can out\n"
            + "vlan 10\n"
            + " name SUN\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree priority 0\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + " shutdown\n"
            + " ip dhcp\n"
        ) in str(result.output)
        # Multi-chassis LAG definitions for the spine-to-leaf and
        # spine-to-CDU links, plus the inter-switch link (lag 256).
        sw_spine_to_leaf = (
            "interface lag 101 multi-chassis\n"
            + " no shutdown\n"
            + " description spine_to_leaf_lag\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6-7\n"
            + " lacp mode active\n"
            + " spanning-tree root-guard\n"
            + "interface lag 103 multi-chassis\n"
            + " no shutdown\n"
            + " description spine_to_leaf_lag\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6-7\n"
            + " lacp mode active\n"
            + " spanning-tree root-guard\n"
            + "interface lag 201 multi-chassis\n"
            + " no shutdown\n"
            + " description sw-cdu-001:49<==sw-spine-002\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6\n"
            + " lacp mode active\n"
            + " spanning-tree root-guard\n"
            + "interface lag 256\n"
            + " no shutdown\n"
            + " description ISL link\n"
            + " no routing\n"
            + " vlan trunk native 1 tag\n"
            + " vlan trunk allowed all\n"
            + " lacp mode active\n"
        )
        assert sw_spine_to_leaf in str(result.output)
        # Route-maps steering traffic toward the worker NCNs
        # (ncn-w001..003), with per-VRF (-Customer) variants.
        output = (
            "no ip icmp redirect\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "route-map ncn-w001 permit seq 10\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.4\n"
            + " set local-preference 1000\n"
            + "route-map ncn-w001 permit seq 20\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.5\n"
            + " set local-preference 1100\n"
            + "route-map ncn-w001 permit seq 30\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.6\n"
            + " set local-preference 1200\n"
            + "route-map ncn-w001 permit seq 40\n"
            + " match ip address prefix-list pl-hmn\n"
            + " set ip next-hop 192.168.0.4\n"
            + "route-map ncn-w001 permit seq 50\n"
            + " match ip address prefix-list pl-nmn\n"
            + " set ip next-hop 192.168.4.4\n"
            + "route-map ncn-w001-Customer permit seq 10\n"
            + " match ip address prefix-list pl-can\n"
            + " set ip next-hop 192.168.11.4\n"
            + "route-map ncn-w001-Customer permit seq 20\n"
            + " match ip address prefix-list pl-cmn\n"
            + "route-map ncn-w002 permit seq 10\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.4\n"
            + " set local-preference 1000\n"
            + "route-map ncn-w002 permit seq 20\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.5\n"
            + " set local-preference 1100\n"
            + "route-map ncn-w002 permit seq 30\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.6\n"
            + " set local-preference 1200\n"
            + "route-map ncn-w002 permit seq 40\n"
            + " match ip address prefix-list pl-hmn\n"
            + " set ip next-hop 192.168.0.5\n"
            + "route-map ncn-w002 permit seq 50\n"
            + " match ip address prefix-list pl-nmn\n"
            + " set ip next-hop 192.168.4.5\n"
            + "route-map ncn-w002-Customer permit seq 10\n"
            + " match ip address prefix-list pl-can\n"
            + " set ip next-hop 192.168.11.5\n"
            + "route-map ncn-w002-Customer permit seq 20\n"
            + " match ip address prefix-list pl-cmn\n"
            + "route-map ncn-w003 permit seq 10\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.4\n"
            + " set local-preference 1000\n"
            + "route-map ncn-w003 permit seq 20\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.5\n"
            + " set local-preference 1100\n"
            + "route-map ncn-w003 permit seq 30\n"
            + " match ip address prefix-list tftp\n"
            + " match ip next-hop 192.168.4.6\n"
            + " set local-preference 1200\n"
            + "route-map ncn-w003 permit seq 40\n"
            + " match ip address prefix-list pl-hmn\n"
            + " set ip next-hop 192.168.0.6\n"
            + "route-map ncn-w003 permit seq 50\n"
            + " match ip address prefix-list pl-nmn\n"
            + " set ip next-hop 192.168.4.6\n"
            + "route-map ncn-w003-Customer permit seq 10\n"
            + " match ip address prefix-list pl-can\n"
            + " set ip next-hop 192.168.11.6\n"
            + "route-map ncn-w003-Customer permit seq 20\n"
            + " match ip address prefix-list pl-cmn\n"
        )
        assert output in str(result.output)
        print(result.output)
        # Loopback/VLAN interfaces, VSX secondary-role configuration, and
        # the physical interfaces (including custom-config overridden ones).
        assert (
            "system interface-group 3 speed 10g\n"
            + "interface loopback 0\n"
            + " ip address 10.2.0.3/32\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.1.3/16\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.1.1\n"
            + " ip helper-address 10.92.100.222\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + " ip ospf passive\n"
            + "interface vlan 2\n"
            + " description NMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.3.3/17\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.3.1\n"
            + " ip helper-address 10.92.100.222\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + " description HMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.0.3/17\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.0.1\n"
            + " ip helper-address 10.94.100.222\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + " ip ospf passive\n"
            + "interface vlan 6\n"
            + " vrf attach Customer\n"
            + " description CMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.12.3/24\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.12.1\n"
            + " ip ospf 2 area 0.0.0.0\n"
            + "interface vlan 7\n"
            + " vrf attach Customer\n"
            + " description CAN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.11.3/24\n"
            + " active-gateway ip mac 12:00:00:00:6b:00\n"
            + " active-gateway ip 192.168.11.1\n"
            + " ip ospf 2 area 0.0.0.0\n"
            + "vsx\n"
            + " system-mac 02:00:00:00:01:00\n"
            + " inter-switch-link lag 256\n"
            + " role secondary\n"
            + " keepalive peer 192.168.255.0 source 192.168.255.1 vrf keepalive\n"
            + " linkup-delay-timer 600\n"
            + " vsx-sync vsx-global\n"
            + "interface 1/1/1\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-001:52<==sw-spine-002\n"
            + " lag 101\n"
            + "interface 1/1/2\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-002:52<==sw-spine-002\n"
            + " lag 101\n"
            + "interface 1/1/3\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-003:52<==sw-spine-002\n"
            + " lag 103\n"
            + "interface 1/1/4\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-leaf-004:52<==sw-spine-002\n"
            + " lag 103\n"
            + "interface 1/1/5\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-cdu-001:49<==sw-spine-002\n"
            + " lag 201\n"
            + "interface 1/1/6\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-cdu-002:49<==sw-spine-002\n"
            + " lag 201\n"
            + "interface 1/1/30\n"
            + " no shutdown\n"
            + " vrf attach keepalive\n"
            + " description VSX keepalive\n"
            + " ip address 192.168.255.1/31\n"
            + "interface 1/1/31\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description vsx isl\n"
            + " lag 256\n"
            + "interface 1/1/32\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description vsx isl\n"
            + " lag 256\n"
            + "interface 1/1/36\n"
            + " no shutdown\n"
            + " ip address 10.103.15.190/30\n"
            + " exit\n"
            + "interface 1/1/20\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ion-node<==sw-spine-002\n"
            + " no routing\n"
            + " vlan access 7\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
        ) in str(result.output)
        print(result.output)
        # DNS, prefix-lists, the default route from the custom config, and
        # the OSPF/BGP routing stanzas (default VRF and vrf Customer).
        assert (
            "ip dns server-address 10.92.100.225\n"
            + "ip prefix-list pl-cmn seq 10 permit 192.168.12.0/24 ge 24\n"
            + "ip prefix-list pl-can seq 20 permit 192.168.11.0/24 ge 24\n"
            + "ip prefix-list pl-hmn seq 30 permit 10.94.100.0/24 ge 24\n"
            + "ip prefix-list pl-nmn seq 40 permit 10.92.100.0/24 ge 24\n"
            + "ip prefix-list tftp seq 10 permit 10.92.100.60/32 ge 32 le 32\n"
            + "ip prefix-list tftp seq 20 permit 10.94.100.60/32 ge 32 le 32\n"
            + "ip route 0.0.0.0/0 10.103.15.189\n"
            + "router ospf 2 vrf Customer\n"
            + " router-id 10.2.0.3\n"
            + " default-information originate\n"
            + " area 0.0.0.0\n"
            + "router ospf 1\n"
            + " router-id 10.2.0.3\n"
            + " redistribute bgp\n"
            + " area 0.0.0.0\n"
            + "router bgp 65533\n"
            + " bgp router-id 10.2.0.3\n"
            + " maximum-paths 8\n"
            + " timers bgp 1 3\n"
            + " distance bgp 20 70\n"
            + " neighbor 192.168.3.2 remote-as 65533\n"
            + " neighbor 192.168.4.4 remote-as 65531\n"
            + " neighbor 192.168.4.4 passive\n"
            + " neighbor 192.168.4.5 remote-as 65531\n"
            + " neighbor 192.168.4.5 passive\n"
            + " neighbor 192.168.4.6 remote-as 65531\n"
            + " neighbor 192.168.4.6 passive\n"
            + " address-family ipv4 unicast\n"
            + " neighbor 192.168.3.2 activate\n"
            + " neighbor 192.168.4.4 activate\n"
            + " neighbor 192.168.4.4 route-map ncn-w001 in\n"
            + " neighbor 192.168.4.5 activate\n"
            + " neighbor 192.168.4.5 route-map ncn-w002 in\n"
            + " neighbor 192.168.4.6 activate\n"
            + " neighbor 192.168.4.6 route-map ncn-w003 in\n"
            + " exit-address-family\n"
            + " vrf Customer\n"
            + " bgp router-id 10.2.0.3\n"
            + " maximum-paths 8\n"
            + " timers bgp 1 3\n"
            + " distance bgp 20 70\n"
            + " neighbor 192.168.12.2 remote-as 65533\n"
            + " neighbor 192.168.12.4 remote-as 65532\n"
            + " neighbor 192.168.12.4 passive\n"
            + " neighbor 192.168.12.5 remote-as 65532\n"
            + " neighbor 192.168.12.5 passive\n"
            + " neighbor 192.168.12.6 remote-as 65532\n"
            + " neighbor 192.168.12.6 passive\n"
            + " address-family ipv4 unicast\n"
            + " neighbor 192.168.12.2 activate\n"
            + " neighbor 192.168.12.4 activate\n"
            + " neighbor 192.168.12.4 route-map ncn-w001-Customer in\n"
            + " neighbor 192.168.12.5 activate\n"
            + " neighbor 192.168.12.5 route-map ncn-w002-Customer in\n"
            + " neighbor 192.168.12.6 activate\n"
            + " neighbor 192.168.12.6 route-map ncn-w003-Customer in\n"
            + " exit-address-family\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_spine_secondary():
    """Test that the `canu generate switch config` command runs and returns valid secondary spine config."""
    spine_secondary = "sw-spine-002"
    with runner.isolated_filesystem():
        # Write the SLS fixture to disk so the CLI can consume it via --sls-file.
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                spine_secondary,
            ],
        )
        assert result.exit_code == 0
        # Bug fix: the original `assert "hostname sw-spine-002\n"` asserted a
        # non-empty string literal (always true); verify the output instead.
        assert "hostname sw-spine-002\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf Customer\n"
            + "vrf keepalive\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf Customer\n"
            + "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "access-list ip mgmt\n"
            + "  10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "  20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "  30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "  40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "  50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "  60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "  70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "  80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "  90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "  100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "  110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "  120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "  130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "  140 deny tcp any any eq ssh\n"
            + "  150 deny tcp any any eq https\n"
            + "  160 deny udp any any eq snmp\n"
            + "  170 deny udp any any eq snmp-trap\n"
            + "  180 comment ALLOW ANYTHING ELSE\n"
            + "  190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "  10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "  100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "  110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "  120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "  130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "  10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "  20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "  40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 7\n"
            + "    name CAN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 10\n"
            + "    name SUN\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree priority 0\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        # Expected spine<->leaf uplink LAG/interface stanzas.
        sw_spine_to_leaf = (
            "interface lag 101 multi-chassis\n"
            + "    no shutdown\n"
            + "    description spine_to_leaf_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/1\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-001:52<==sw-spine-002\n"
            + "    lag 101\n"
            + "\n"
            + "interface 1/1/2\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-002:52<==sw-spine-002\n"
            + "    lag 101\n"
            + "\n"
            + "interface lag 103 multi-chassis\n"
            + "    no shutdown\n"
            + "    description spine_to_leaf_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-003:52<==sw-spine-002\n"
            + "    lag 103\n"
            + "\n"
            + "interface 1/1/4\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-004:52<==sw-spine-002\n"
            + "    lag 103\n"
        )
        assert sw_spine_to_leaf in str(result.output)
        # Expected spine<->CDU uplink stanzas.
        spine_to_cdu = (
            "interface lag 201 multi-chassis\n"
            + "    no shutdown\n"
            + "    description sw-cdu-001:49<==sw-spine-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/5\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-cdu-001:49<==sw-spine-002\n"
            + "    lag 201\n"
            + "\n"
            + "interface 1/1/6\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-cdu-002:49<==sw-spine-002\n"
            + "    lag 201\n"
        )
        assert spine_to_cdu in str(result.output)
        print(result.output)
        # VSX ISL/keepalive, loopback, and VLAN interface config; the
        # secondary spine takes the `.3` addresses and the secondary VSX role.
        assert (
            "interface lag 256\n"
            + "    no shutdown\n"
            + "    description ISL link\n"
            + "    no routing\n"
            + "    vlan trunk native 1 tag\n"
            + "    vlan trunk allowed all\n"
            + "    lacp mode active\n"
            + "interface 1/1/30\n"
            + "    no shutdown\n"
            + "    vrf attach keepalive\n"
            + "    description VSX keepalive\n"
            + "    ip address 192.168.255.1/31\n"
            + "interface 1/1/31\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "interface 1/1/32\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "vsx\n"
            + "    system-mac 02:00:00:00:01:00\n"
            + "    inter-switch-link lag 256\n"
            + "    role secondary\n"
            + "    keepalive peer 192.168.255.0 source 192.168.255.1 vrf keepalive\n"
            + "    linkup-delay-timer 600\n"
            + "    vsx-sync vsx-global\n"
            + "\n"
            + "interface loopback 0\n"
            + "    ip address 10.2.0.3/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.3/16\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.1.1\n"
            + "    ip helper-address 10.92.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 2\n"
            + "    description NMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.3.3/17\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.3.1\n"
            + "    ip helper-address 10.92.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + "    description HMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.0.3/17\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.0.1\n"
            + "    ip helper-address 10.94.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 6\n"
            + "    vrf attach Customer\n"
            + "    description CMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.12.3/24\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.12.1\n"
            + "    ip ospf 2 area 0.0.0.0\n"
            + "interface vlan 7\n"
            + "    vrf attach Customer\n"
            + "    description CAN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.11.3/24\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.11.1\n"
            + "    ip ospf 2 area 0.0.0.0\n"
            + "ip dns server-address 10.92.100.225\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ip prefix-list pl-cmn seq 10 permit 192.168.12.0/24 ge 24\n"
            + "ip prefix-list pl-can seq 20 permit 192.168.11.0/24 ge 24\n"
            + "ip prefix-list pl-hmn seq 30 permit 10.94.100.0/24 ge 24\n"
            + "ip prefix-list pl-nmn seq 40 permit 10.92.100.0/24 ge 24\n"
            + "ip prefix-list tftp seq 10 permit 10.92.100.60/32 ge 32 le 32\n"
            + "ip prefix-list tftp seq 20 permit 10.94.100.60/32 ge 32 le 32\n"
        ) in str(result.output)
        print(result.output)
        # Per-worker route-maps (default and Customer VRF variants).
        assert (
            "route-map ncn-w001 permit seq 10\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.4\n"
            + "     set local-preference 1000\n"
            + "route-map ncn-w001 permit seq 20\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.5\n"
            + "     set local-preference 1100\n"
            + "route-map ncn-w001 permit seq 30\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.6\n"
            + "     set local-preference 1200\n"
            + "route-map ncn-w001 permit seq 40\n"
            + "     match ip address prefix-list pl-hmn\n"
            + "     set ip next-hop 192.168.0.4\n"
            + "route-map ncn-w001 permit seq 50\n"
            + "     match ip address prefix-list pl-nmn\n"
            + "     set ip next-hop 192.168.4.4\n"
            + "\n"
            + "route-map ncn-w001-Customer permit seq 10\n"
            + "     match ip address prefix-list pl-can\n"
            + "     set ip next-hop 192.168.11.4\n"
            + "route-map ncn-w001-Customer permit seq 20\n"
            + "     match ip address prefix-list pl-cmn\n"
            + "\n"
            + "\n"
            + "route-map ncn-w002 permit seq 10\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.4\n"
            + "     set local-preference 1000\n"
            + "route-map ncn-w002 permit seq 20\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.5\n"
            + "     set local-preference 1100\n"
            + "route-map ncn-w002 permit seq 30\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.6\n"
            + "     set local-preference 1200\n"
            + "route-map ncn-w002 permit seq 40\n"
            + "     match ip address prefix-list pl-hmn\n"
            + "     set ip next-hop 192.168.0.5\n"
            + "route-map ncn-w002 permit seq 50\n"
            + "     match ip address prefix-list pl-nmn\n"
            + "     set ip next-hop 192.168.4.5\n"
            + "\n"
            + "route-map ncn-w002-Customer permit seq 10\n"
            + "     match ip address prefix-list pl-can\n"
            + "     set ip next-hop 192.168.11.5\n"
            + "route-map ncn-w002-Customer permit seq 20\n"
            + "     match ip address prefix-list pl-cmn\n"
            + "\n"
            + "\n"
            + "route-map ncn-w003 permit seq 10\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.4\n"
            + "     set local-preference 1000\n"
            + "route-map ncn-w003 permit seq 20\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.5\n"
            + "     set local-preference 1100\n"
            + "route-map ncn-w003 permit seq 30\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.6\n"
            + "     set local-preference 1200\n"
            + "route-map ncn-w003 permit seq 40\n"
            + "     match ip address prefix-list pl-hmn\n"
            + "     set ip next-hop 192.168.0.6\n"
            + "route-map ncn-w003 permit seq 50\n"
            + "     match ip address prefix-list pl-nmn\n"
            + "     set ip next-hop 192.168.4.6\n"
            + "\n"
            + "route-map ncn-w003-Customer permit seq 10\n"
            + "     match ip address prefix-list pl-can\n"
            + "     set ip next-hop 192.168.11.6\n"
            + "route-map ncn-w003-Customer permit seq 20\n"
            + "     match ip address prefix-list pl-cmn\n"
        ) in str(result.output)
        print(result.output)
        # OSPF + BGP (default and Customer VRF) and https-server config.
        assert (
            "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.3\n"
            + "    default-information originate\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.3\n"
            + "    redistribute bgp\n"
            + "    area 0.0.0.0\n"
            + "\n"
            + "router bgp 65533\n"
            + "    bgp router-id 10.2.0.3\n"
            + "    maximum-paths 8\n"
            + "    timers bgp 1 3\n"
            + "    distance bgp 20 70\n"
            + "    neighbor 192.168.3.2 remote-as 65533\n"
            + "    neighbor 192.168.4.4 remote-as 65531\n"
            + "    neighbor 192.168.4.4 passive\n"
            + "    neighbor 192.168.4.5 remote-as 65531\n"
            + "    neighbor 192.168.4.5 passive\n"
            + "    neighbor 192.168.4.6 remote-as 65531\n"
            + "    neighbor 192.168.4.6 passive\n"
            + "    address-family ipv4 unicast\n"
            + "        neighbor 192.168.3.2 activate\n"
            + "        neighbor 192.168.4.4 activate\n"
            + "        neighbor 192.168.4.4 route-map ncn-w001 in\n"
            + "        neighbor 192.168.4.5 activate\n"
            + "        neighbor 192.168.4.5 route-map ncn-w002 in\n"
            + "        neighbor 192.168.4.6 activate\n"
            + "        neighbor 192.168.4.6 route-map ncn-w003 in\n"
            + "    exit-address-family\n"
            + "    vrf Customer\n"
            + "        bgp router-id 10.2.0.3\n"
            + "        maximum-paths 8\n"
            + "        timers bgp 1 3\n"
            + "        distance bgp 20 70\n"
            + "        neighbor 192.168.12.2 remote-as 65533\n"
            + "        neighbor 192.168.12.4 remote-as 65532\n"
            + "        neighbor 192.168.12.4 passive\n"
            + "        neighbor 192.168.12.5 remote-as 65532\n"
            + "        neighbor 192.168.12.5 passive\n"
            + "        neighbor 192.168.12.6 remote-as 65532\n"
            + "        neighbor 192.168.12.6 passive\n"
            + "        address-family ipv4 unicast\n"
            + "            neighbor 192.168.12.2 activate\n"
            + "            neighbor 192.168.12.4 activate\n"
            + "            neighbor 192.168.12.4 route-map ncn-w001-Customer in\n"
            + "            neighbor 192.168.12.5 activate\n"
            + "            neighbor 192.168.12.5 route-map ncn-w002-Customer in\n"
            + "            neighbor 192.168.12.6 activate\n"
            + "            neighbor 192.168.12.6 route-map ncn-w003-Customer in\n"
            + "        exit-address-family\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_leaf_primary():
    """Test that the `canu generate switch config` command runs and returns valid primary leaf config."""
    leaf_primary = "sw-leaf-001"
    with runner.isolated_filesystem():
        # Write the SLS fixture to disk so the CLI can consume it via --sls-file.
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                leaf_primary,
            ],
        )
        assert result.exit_code == 0
        # Bug fix: the original `assert "hostname sw-leaf-001\n"` asserted a
        # non-empty string literal (always true); verify the output instead.
        assert "hostname sw-leaf-001\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf Customer\n"
            + "vrf keepalive\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "ssh server vrf Customer\n"
            + "access-list ip mgmt\n"
            + "  10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "  20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "  30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "  40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "  50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "  60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "  70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "  80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "  90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "  100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "  110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "  120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "  130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "  140 deny tcp any any eq ssh\n"
            + "  150 deny tcp any any eq https\n"
            + "  160 deny udp any any eq snmp\n"
            + "  170 deny udp any any eq snmp-trap\n"
            + "  180 comment ALLOW ANYTHING ELSE\n"
            + "  190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "  10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "  100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "  110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "  120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "  130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "  10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "  20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "  40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 7\n"
            + "    name CAN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 10\n"
            + "    name SUN\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        # Master NCN connections (OCP port 1 on the primary leaf).
        ncn_m = (
            "interface lag 1 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m001:ocp:1<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/1\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m001:ocp:1<==sw-leaf-001\n"
            + "    lag 1\n"
            + "\n"
            + "interface lag 3 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m002:ocp:1<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m002:ocp:1<==sw-leaf-001\n"
            + "    lag 3\n"
        )
        assert ncn_m in str(result.output)
        # Worker NCN connection.
        ncn_w = (
            "interface lag 5 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-w001:ocp:1<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/5\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w001:ocp:1<==sw-leaf-001\n"
            + "    lag 5\n"
        )
        assert ncn_w in str(result.output)
        # Storage NCN connections (NMN trunk on port 1, SUN vlan 10 on port 2).
        ncn_s = (
            "interface lag 7 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s001:ocp:1<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/7\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:ocp:1<==sw-leaf-001\n"
            + "    lag 7\n"
            + "\n"
            + "interface lag 8 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s001:ocp:2<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/8\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:ocp:2<==sw-leaf-001\n"
            + "    lag 8\n"
            + "\n"
            + "interface lag 9 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s002:ocp:1<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/9\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:ocp:1<==sw-leaf-001\n"
            + "    lag 9\n"
            + "\n"
            + "interface lag 10 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s002:ocp:2<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/10\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:ocp:2<==sw-leaf-001\n"
            + "    lag 10\n"
        )
        assert ncn_s in str(result.output)
        leaf_to_leaf_bmc = (
            "interface lag 151 multi-chassis\n"
            + "    no shutdown\n"
            + "    description sw-leaf-bmc-001:48<==sw-leaf-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/51\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-bmc-001:48<==sw-leaf-001\n"
            + "    lag 151\n"
        )
        assert leaf_to_leaf_bmc in str(result.output)
        leaf_to_spine = (
            "interface lag 101 multi-chassis\n"
            + "    no shutdown\n"
            + "    description leaf_to_spines_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/52\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-002:1<==sw-leaf-001\n"
            + "    lag 101\n"
            + "\n"
            + "interface 1/1/53\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-001:1<==sw-leaf-001\n"
            + "    lag 101\n"
        )
        assert leaf_to_spine in str(result.output)
        print(result.output)
        # VSX ISL/keepalive plus loopback/VLAN interfaces; primary leaf takes
        # the `.4` addresses and the primary VSX role.
        assert (
            "interface lag 256\n"
            + "    no shutdown\n"
            + "    description ISL link\n"
            + "    no routing\n"
            + "    vlan trunk native 1 tag\n"
            + "    vlan trunk allowed all\n"
            + "    lacp mode active\n"
            + "interface 1/1/54\n"
            + "    no shutdown\n"
            + "    vrf attach keepalive\n"
            + "    description VSX keepalive\n"
            + "    ip address 192.168.255.0/31\n"
            + "interface 1/1/55\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "interface 1/1/56\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "vsx\n"
            + "    system-mac 02:01:00:00:01:00\n"
            + "    inter-switch-link lag 256\n"
            + "    role primary\n"
            + "    keepalive peer 192.168.255.1 source 192.168.255.0 vrf keepalive\n"
            + "    linkup-delay-timer 600\n"
            + "    vsx-sync vsx-global\n"
            + "\n"
            + "interface loopback 0\n"
            + "    ip address 10.2.0.4/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.4/16\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 2\n"
            + "    description NMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.3.4/17\n"
            + "    ip ospf 1 area 0.0.0.0\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "interface vlan 4\n"
            + "    description HMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.0.4/17\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 6\n"
            + "    vrf attach Customer\n"
            + "    description CMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.12.4/24\n"
            + "    ip ospf 2 area 0.0.0.0\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.4\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.4\n"
            + "    area 0.0.0.0\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_leaf_primary_to_uan():
    """Test that the `canu generate switch config` command runs and returns valid primary leaf config."""
    leaf_primary_3 = "sw-leaf-003"
    with runner.isolated_filesystem():
        # Write the SLS fixture to disk so the CLI can consume it via --sls-file.
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                leaf_primary_3,
            ],
        )
        assert result.exit_code == 0
        # Bug fix: the original `assert "hostname sw-leaf-003\n"` asserted a
        # non-empty string literal (always true); verify the output instead.
        assert "hostname sw-leaf-003\n" in str(result.output)
        # UAN port 1 is an NMN access port; port 2 carries the CAN (vlan 7) LAG.
        uan = (
            "interface 1/1/7\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description uan001:ocp:1<==sw-leaf-003\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface lag 8 multi-chassis\n"
            + "    no shutdown\n"
            + "    description uan001:ocp:2<==sw-leaf-003\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/8\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description uan001:ocp:2<==sw-leaf-003\n"
            + "    lag 8\n"
        )
        assert uan in str(result.output)
def test_switch_config_leaf_secondary():
    """Test that the `canu generate switch config` command runs and returns valid secondary leaf config."""
    leaf_secondary = "sw-leaf-002"
    with runner.isolated_filesystem():
        # Write the SLS fixture to disk so the CLI can consume it via --sls-file.
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                leaf_secondary,
            ],
        )
        assert result.exit_code == 0
        # Bug fix: the original `assert "hostname sw-leaf-002\n"` asserted a
        # non-empty string literal (always true); verify the output instead.
        assert "hostname sw-leaf-002\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf Customer\n"
            + "vrf keepalive\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "ssh server vrf Customer\n"
            + "access-list ip mgmt\n"
            + "  10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "  20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "  30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "  40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "  50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "  60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "  70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "  80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "  90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "  100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "  110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "  120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "  130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "  140 deny tcp any any eq ssh\n"
            + "  150 deny tcp any any eq https\n"
            + "  160 deny udp any any eq snmp\n"
            + "  170 deny udp any any eq snmp-trap\n"
            + "  180 comment ALLOW ANYTHING ELSE\n"
            + "  190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "  10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "  100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "  110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "  120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "  130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "  10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "  20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "  40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 7\n"
            + "    name CAN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 10\n"
            + "    name SUN\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        # Master NCN connections (pcie-slot1 ports land on the secondary leaf).
        ncn_m = (
            "interface lag 1 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m001:pcie-slot1:1<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/1\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m001:pcie-slot1:1<==sw-leaf-002\n"
            + "    lag 1\n"
            + "\n"
            + "interface lag 3 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m002:pcie-slot1:1<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m002:pcie-slot1:1<==sw-leaf-002\n"
            + "    lag 3\n"
        )
        assert ncn_m in str(result.output)
        # Worker NCN connection (OCP port 2 on the secondary leaf).
        ncn_w = (
            "interface lag 5 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-w001:ocp:2<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/6\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w001:ocp:2<==sw-leaf-002\n"
            + "    lag 5\n"
        )
        assert ncn_w in str(result.output)
        # Storage NCN connections (NMN trunk on port 1, SUN vlan 10 on port 2).
        ncn_s = (
            "interface lag 7 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s001:pcie-slot1:1<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/7\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:pcie-slot1:1<==sw-leaf-002\n"
            + "    lag 7\n"
            + "\n"
            + "interface lag 8 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s001:pcie-slot1:2<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/8\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:pcie-slot1:2<==sw-leaf-002\n"
            + "    lag 8\n"
            + "\n"
            + "interface lag 9 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s002:pcie-slot1:1<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/9\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:pcie-slot1:1<==sw-leaf-002\n"
            + "    lag 9\n"
            + "\n"
            + "interface lag 10 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s002:pcie-slot1:2<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/10\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:pcie-slot1:2<==sw-leaf-002\n"
            + "    lag 10\n"
        )
        assert ncn_s in str(result.output)
        leaf_to_leaf_bmc = (
            "interface lag 151 multi-chassis\n"
            + "    no shutdown\n"
            + "    description sw-leaf-bmc-001:47<==sw-leaf-002\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/51\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-bmc-001:47<==sw-leaf-002\n"
            + "    lag 151\n"
        )
        assert leaf_to_leaf_bmc in str(result.output)
        leaf_to_spine = (
            "interface lag 101 multi-chassis\n"
            + "    no shutdown\n"
            + "    description leaf_to_spines_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/52\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-002:2<==sw-leaf-002\n"
            + "    lag 101\n"
            + "\n"
            + "interface 1/1/53\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-001:2<==sw-leaf-002\n"
            + "    lag 101\n"
        )
        assert leaf_to_spine in str(result.output)
        print(result.output)
        # VSX ISL/keepalive plus loopback/VLAN interfaces; secondary leaf takes
        # the `.5` addresses and the secondary VSX role.
        assert (
            "interface lag 256\n"
            + "    no shutdown\n"
            + "    description ISL link\n"
            + "    no routing\n"
            + "    vlan trunk native 1 tag\n"
            + "    vlan trunk allowed all\n"
            + "    lacp mode active\n"
            + "interface 1/1/54\n"
            + "    no shutdown\n"
            + "    vrf attach keepalive\n"
            + "    description VSX keepalive\n"
            + "    ip address 192.168.255.1/31\n"
            + "interface 1/1/55\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "interface 1/1/56\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "vsx\n"
            + "    system-mac 02:01:00:00:01:00\n"
            + "    inter-switch-link lag 256\n"
            + "    role secondary\n"
            + "    keepalive peer 192.168.255.0 source 192.168.255.1 vrf keepalive\n"
            + "    linkup-delay-timer 600\n"
            + "    vsx-sync vsx-global\n"
            + "\n"
            + "interface loopback 0\n"
            + "    ip address 10.2.0.5/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.5/16\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 2\n"
            + "    description NMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.3.5/17\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + "    description HMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.0.5/17\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 6\n"
            + "    vrf attach Customer\n"
            + "    description CMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.12.5/24\n"
            + "    ip ospf 2 area 0.0.0.0\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.5\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.5\n"
            + "    area 0.0.0.0\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_leaf_secondary_to_uan():
    """Test that the `canu generate switch config` command runs and returns valid secondary leaf config."""
    leaf_secondary_3 = "sw-leaf-004"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                leaf_secondary_3,
            ],
        )
        assert result.exit_code == 0
        # Fixed: this was a bare string-literal assert (always truthy, never
        # checked anything); the hostname must actually appear in the output.
        assert "hostname sw-leaf-004\n" in str(result.output)
        uan = (
            "interface 1/1/7\n"
            + "    mtu 9198\n"
            + "    description uan001:pcie-slot1:1<==sw-leaf-004\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface lag 8 multi-chassis\n"
            + "    no shutdown\n"
            + "    description uan001:pcie-slot1:2<==sw-leaf-004\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/8\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description uan001:pcie-slot1:2<==sw-leaf-004\n"
            + "    lag 8\n"
        )
        assert uan in str(result.output)
def test_switch_config_cdu_primary():
    """Test that the `canu generate switch config` command runs and returns valid primary cdu config."""
    cdu_primary = "sw-cdu-001"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                cdu_primary,
            ],
        )
        assert result.exit_code == 0
        # Fixed: this was a bare string-literal assert (always truthy, never
        # checked anything); the hostname must actually appear in the output.
        assert "hostname sw-cdu-001\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf keepalive\n"
            + "vrf Customer\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf Customer\n"
            + "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "access-list ip mgmt\n"
            + "    10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "    20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "    30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "    40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "    50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "    60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "    70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "    80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "    90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "    100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "    110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "    120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "    130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "    140 deny tcp any any eq ssh\n"
            + "    150 deny tcp any any eq https\n"
            + "    160 deny udp any any eq snmp\n"
            + "    170 deny udp any any eq snmp-trap\n"
            + "    180 comment ALLOW ANYTHING ELSE\n"
            + "    190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "    10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "    20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "    30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "    40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "    50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "    60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "    70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "    80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "    90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "    100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "    110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "    120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "    130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "    10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "    20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "    30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "    40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "    50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        cmm = (
            "interface lag 2 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-000:1<==sw-cdu-001\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/2\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-000:1<==sw-cdu-001\n"
            + "    lag 2\n"
            + "interface lag 3 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-001:1<==sw-cdu-001\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-001:1<==sw-cdu-001\n"
            + "    lag 3\n"
            + "interface lag 4 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-002:1<==sw-cdu-001\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/4\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-002:1<==sw-cdu-001\n"
            + "    lag 4\n"
            + "interface lag 5 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-003:1<==sw-cdu-001\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/5\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-003:1<==sw-cdu-001\n"
            + "    lag 5\n"
        )
        assert cmm in str(result.output)
        cec = (
            "interface 1/1/1\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cec-x3002-000:1<==sw-cdu-001\n"
            + "    no routing\n"
            + "    vlan access 3000\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
        )
        assert cec in str(result.output)
        cdu_to_spine = (
            "interface lag 255 multi-chassis\n"
            + "    no shutdown\n"
            + "    description cdu_to_spines_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/49\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-002:5<==sw-cdu-001\n"
            + "    lag 255\n"
            + "\n"
            + "interface 1/1/50\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-001:5<==sw-cdu-001\n"
            + "    lag 255\n"
        )
        assert cdu_to_spine in str(result.output)
        print(result.output)
        assert (
            "interface lag 256\n"
            + "    no shutdown\n"
            + "    description ISL link\n"
            + "    no routing\n"
            + "    vlan trunk native 1 tag\n"
            + "    vlan trunk allowed all\n"
            + "    lacp mode active\n"
            + "interface 1/1/48\n"
            + "    no shutdown\n"
            + "    vrf attach keepalive\n"
            + "    description VSX keepalive\n"
            + "    ip address 192.168.255.0/31\n"
            + "interface 1/1/51\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "interface 1/1/52\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "vsx\n"
            + "    system-mac 02:02:00:00:01:00\n"
            + "    inter-switch-link lag 256\n"
            + "    role primary\n"
            + "    keepalive peer 192.168.255.1 source 192.168.255.0 vrf keepalive\n"
            + "    linkup-delay-timer 600\n"
            + "    vsx-sync vsx-global\n"
            + "\n"
            + "interface loopback 0\n"
            + "    ip address 10.2.0.16/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.16/16\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
        ) in str(result.output)
        mtn_hmn_vlan = (
            "vlan 3000\n"
            + "    name cabinet_3002\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "\n"
            + "interface vlan 3000\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.104.2/22\n"
            + "    active-gateway ip mac 12:00:00:00:73:00\n"
            + "    active-gateway ip 192.168.104.1\n"
            + "    ipv6 address autoconfig\n"
            + "    ip helper-address 10.94.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
        )
        assert mtn_hmn_vlan in str(result.output)
        mtn_nmn_vlan = (
            "vlan 2000\n"
            + "    name cabinet_3002\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "\n"
            + "interface vlan 2000\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.100.2/22\n"
            + "    active-gateway ip mac 12:00:00:00:73:00\n"
            + "    active-gateway ip 192.168.100.1\n"
            + "    ip helper-address 10.92.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
        )
        assert mtn_nmn_vlan in str(result.output)
        print(result.output)
        assert (
            "ip dns server-address 10.92.100.225\n"
            + "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.16\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.16\n"
            + "    area 0.0.0.0\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_cdu_secondary():
    """Test that the `canu generate switch config` command runs and returns valid secondary cdu config."""
    cdu_secondary = "sw-cdu-002"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                cdu_secondary,
            ],
        )
        assert result.exit_code == 0
        # Fixed: this was a bare string-literal assert (always truthy, never
        # checked anything); the hostname must actually appear in the output.
        assert "hostname sw-cdu-002\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf keepalive\n"
            + "vrf Customer\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf Customer\n"
            + "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "access-list ip mgmt\n"
            + "    10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "    20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "    30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "    40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "    50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "    60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "    70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "    80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "    90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "    100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "    110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "    120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "    130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "    140 deny tcp any any eq ssh\n"
            + "    150 deny tcp any any eq https\n"
            + "    160 deny udp any any eq snmp\n"
            + "    170 deny udp any any eq snmp-trap\n"
            + "    180 comment ALLOW ANYTHING ELSE\n"
            + "    190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "    10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "    20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "    30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "    40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "    50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "    60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "    70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "    80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "    90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "    100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "    110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "    120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "    130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "    10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "    20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "    30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "    40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "    50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        cmm = (
            "interface lag 2 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-000:2<==sw-cdu-002\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/2\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-000:2<==sw-cdu-002\n"
            + "    lag 2\n"
            + "interface lag 3 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-001:2<==sw-cdu-002\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-001:2<==sw-cdu-002\n"
            + "    lag 3\n"
            + "interface lag 4 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-002:2<==sw-cdu-002\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/4\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-002:2<==sw-cdu-002\n"
            + "    lag 4\n"
            + "interface lag 5 multi-chassis static\n"
            + "    no shutdown\n"
            + "    description cmm-x3002-003:2<==sw-cdu-002\n"
            + "    no routing\n"
            + "    vlan trunk native 2000\n"
            + "    vlan trunk allowed 2000,3000\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/5\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cmm-x3002-003:2<==sw-cdu-002\n"
            + "    lag 5\n"
        )
        assert cmm in str(result.output)
        cdu_to_spine = (
            "interface lag 255 multi-chassis\n"
            + "    no shutdown\n"
            + "    description cdu_to_spines_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/49\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-002:6<==sw-cdu-002\n"
            + "    lag 255\n"
            + "\n"
            + "interface 1/1/50\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-spine-001:6<==sw-cdu-002\n"
            + "    lag 255\n"
        )
        assert cdu_to_spine in str(result.output)
        print(result.output)
        assert (
            "interface lag 256\n"
            + "    no shutdown\n"
            + "    description ISL link\n"
            + "    no routing\n"
            + "    vlan trunk native 1 tag\n"
            + "    vlan trunk allowed all\n"
            + "    lacp mode active\n"
            + "interface 1/1/48\n"
            + "    no shutdown\n"
            + "    vrf attach keepalive\n"
            + "    description VSX keepalive\n"
            + "    ip address 192.168.255.1/31\n"
            + "interface 1/1/51\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "interface 1/1/52\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "vsx\n"
            + "    system-mac 02:02:00:00:01:00\n"
            + "    inter-switch-link lag 256\n"
            + "    role secondary\n"
            + "    keepalive peer 192.168.255.0 source 192.168.255.1 vrf keepalive\n"
            + "    linkup-delay-timer 600\n"
            + "    vsx-sync vsx-global\n"
            + "\n"
            + "interface loopback 0\n"
            + "    ip address 10.2.0.17/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.17/16\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
        ) in str(result.output)
        mtn_hmn_vlan = (
            "vlan 3000\n"
            + "    name cabinet_3002\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "\n"
            + "interface vlan 3000\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.104.3/22\n"
            + "    active-gateway ip mac 12:00:00:00:73:00\n"
            + "    active-gateway ip 192.168.104.1\n"
            + "    ipv6 address autoconfig\n"
            + "    ip helper-address 10.94.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
        )
        assert mtn_hmn_vlan in str(result.output)
        mtn_nmn_vlan = (
            "vlan 2000\n"
            + "    name cabinet_3002\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "\n"
            + "interface vlan 2000\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.100.3/22\n"
            + "    active-gateway ip mac 12:00:00:00:73:00\n"
            + "    active-gateway ip 192.168.100.1\n"
            + "    ip helper-address 10.92.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
        )
        assert mtn_nmn_vlan in str(result.output)
        print(result.output)
        assert (
            "ip dns server-address 10.92.100.225\n"
            + "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.17\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.17\n"
            + "    area 0.0.0.0\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_leaf_bmc():
    """Test that the `canu generate switch config` command runs and returns valid leaf-bmc config."""
    leaf_bmc = "sw-leaf-bmc-001"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--tabs",
                tabs,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                leaf_bmc,
            ],
        )
        assert result.exit_code == 0
        # Fixed: this was a bare string-literal assert (always truthy, never
        # checked anything); the hostname must actually appear in the output.
        assert "hostname sw-leaf-bmc-001\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf Customer\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf default\n"
            + "ssh server vrf mgmt\n"
            + "ssh server vrf Customer\n"
            + "access-list ip mgmt\n"
            + "    10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "    20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "    30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "    40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "    50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "    60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "    70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "    80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "    90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "    100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "    110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "    120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "    130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "    140 deny tcp any any eq ssh\n"
            + "    150 deny tcp any any eq https\n"
            + "    160 deny udp any any eq snmp\n"
            + "    170 deny udp any any eq snmp-trap\n"
            + "    180 comment ALLOW ANYTHING ELSE\n"
            + "    190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "    10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "    20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "    30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "    40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "    50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "    60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "    70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "    80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "    90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "    100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "    110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "    120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "    130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "    10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "    20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "    30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "    40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "    50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        compute_leaf_bmc = (
            "interface 1/1/24\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cn001:onboard:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/25\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cn002:onboard:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/26\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cn003:onboard:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/27\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description cn004:onboard:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
        )
        assert compute_leaf_bmc in str(result.output)
        bmc = (
            "interface 1/1/1\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m001:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/2\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m002:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m003:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/4\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w001:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/5\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w002:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/6\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w003:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/7\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/8\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/9\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s003:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "interface 1/1/10\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description uan001:bmc:1<==sw-leaf-bmc-001\n"
            + "    no routing\n"
            + "    vlan access 4\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
        )
        assert bmc in str(result.output)
        leaf_bmc_to_leaf = (
            "interface lag 255\n"
            + "    no shutdown\n"
            + "    description leaf_bmc_to_leaf_lag\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/47\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-002:51<==sw-leaf-bmc-001\n"
            + "    lag 255\n"
            + "\n"
            + "interface 1/1/48\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-001:51<==sw-leaf-bmc-001\n"
            + "    lag 255\n"
        )
        assert leaf_bmc_to_leaf in str(result.output)
        print(result.output)
        assert (
            "interface loopback 0\n"
            + "    ip address 10.2.0.12/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.12/16\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 2\n"
            + "    description NMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.3.12/17\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + "    description HMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.0.12/17\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 6\n"
            + "    vrf attach Customer\n"
            + "    description CMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.12.12/24\n"
            + "    ip ospf 2 area 0.0.0.0\n"
            + "snmp-server vrf default\n"
            + "ip dns server-address 10.92.100.225\n"
            + "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.12\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.12\n"
            + "    area 0.0.0.0\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_csi_file_missing():
    """Test that the `canu generate switch config` command errors on sls_file.json file missing."""
    missing_sls_path = "/bad_file.json"
    with runner.isolated_filesystem():
        # Build the CLI argument list up front, then invoke once.
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--shcd", test_file,
            "--tabs", tabs,
            "--corners", corners,
            "--sls-file", missing_sls_path,
            "--name", switch_name,
        ]
        result = runner.invoke(cli, cli_args)
        assert result.exit_code == 2
        assert "No such file or directory" in str(result.output)
def test_switch_config_missing_file():
    """Test that the `canu generate switch config` command fails on missing file."""
    with runner.isolated_filesystem():
        # Write the SLS payload the command expects to find on disk.
        with open(sls_file, "w") as sls_out:
            json.dump(sls_input, sls_out)
        # Neither --ccj nor --shcd is given, so the mutually exclusive
        # option-group check should reject the invocation.
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--tabs", tabs,
            "--corners", corners,
            "--sls-file", sls_file,
            "--name", switch_name,
        ]
        result = runner.invoke(cli, cli_args)
        assert result.exit_code == 2
        print(result.output)
        expected_error = (
            "Error: Missing one of the required mutually exclusive options from 'Network input source' option group:\n"
            "  '--ccj'\n"
            "  '--shcd'\n"
        )
        assert expected_error in str(result.output)
def test_switch_config_bad_file():
    """Test that the `canu generate switch config` command fails on bad file."""
    nonexistent_shcd = "does_not_exist.xlsx"
    with runner.isolated_filesystem():
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--shcd", nonexistent_shcd,
            "--tabs", tabs,
            "--corners", corners,
            "--sls-file", sls_file,
            "--name", switch_name,
        ]
        result = runner.invoke(cli, cli_args)
        # Click rejects the nonexistent path during option validation.
        assert result.exit_code == 2
        assert "Error: Invalid value for '--shcd':" in str(result.output)
def test_switch_config_missing_tabs():
    """Test that the `canu generate switch config` command prompts for missing tabs."""
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        # Fixed: "--corners"/"--sls-file" were accidentally passed twice with
        # the same values; the duplicates have been removed.
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture,
                "--shcd",
                test_file,
                "--corners",
                corners,
                "--sls-file",
                sls_file,
                "--name",
                switch_name,
            ],
            input="SWITCH_TO_SWITCH,NON_COMPUTE_NODES,HARDWARE_MANAGEMENT,COMPUTE_NODES\n",
        )
        assert result.exit_code == 0
        assert "hostname sw-spine-001" in str(result.output)
def test_switch_config_bad_tab():
    """Test that the `canu generate switch config` command fails on bad tab name."""
    bad_tab = "BAD_TAB_NAME"
    bad_tab_corners = "I14,S48"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_out:
            json.dump(sls_input, sls_out)
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--shcd", test_file,
            "--tabs", bad_tab,
            "--corners", bad_tab_corners,
            "--sls-file", sls_file,
            "--name", switch_name,
        ]
        result = runner.invoke(cli, cli_args)
        assert result.exit_code == 1
        assert f"Tab BAD_TAB_NAME not found in {test_file}" in str(result.output)
def test_switch_config_switch_name_prompt():
    """Test that the `canu generate switch config` command prompts for missing switch name."""
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_out:
            json.dump(sls_input, sls_out)
        # --name is deliberately omitted; the switch name is supplied via the
        # interactive prompt instead.
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--shcd", test_file,
            "--tabs", tabs,
            "--corners", corners,
            "--sls-file", sls_file,
        ]
        result = runner.invoke(cli, cli_args, input="sw-spine-001\n")
        assert result.exit_code == 0
        output = str(result.output)
        for expected in (
            "hostname sw-spine-001",
            "ntp server 192.168.4.4",
            "ntp server 192.168.4.5",
            "ntp server 192.168.4.6",
            "deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0",
            "interface 1/1/30",
            "interface 1/1/31",
            "interface 1/1/32",
        ):
            assert expected in output
def test_switch_config_corner_prompt():
    """Test that the `canu generate switch config` command prompts for corner input and runs."""
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_out:
            json.dump(sls_input, sls_out)
        # --corners is deliberately omitted; one corner pair per tab is
        # answered through the interactive prompt.
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--shcd", test_file,
            "--tabs", tabs,
            "--sls-file", sls_file,
            "--name", switch_name,
        ]
        result = runner.invoke(
            cli,
            cli_args,
            input="J14\nT42\nJ14\nT48\nJ14\nT24\nJ14\nT23",
        )
        assert result.exit_code == 0
        output = str(result.output)
        for expected in (
            "hostname sw-spine-001",
            "ntp server 192.168.4.4",
            "ntp server 192.168.4.5",
            "ntp server 192.168.4.6",
            "deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0",
            "interface 1/1/30",
            "interface 1/1/31",
            "interface 1/1/32",
        ):
            assert expected in output
def test_switch_config_not_enough_corners():
    """Test that the `canu generate switch config` command fails on not enough corners."""
    not_enough_corners = "H16"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_out:
            json.dump(sls_input, sls_out)
        cli_args = [
            "--cache", cache_minutes,
            "generate", "switch", "config",
            "--csm", csm,
            "--architecture", architecture,
            "--shcd", test_file,
            "--tabs", tabs,
            "--corners", not_enough_corners,
            "--sls-file", sls_file,
            "--name", switch_name,
        ]
        result = runner.invoke(cli, cli_args)
        # The command reports the corner-count mismatch but exits cleanly.
        assert result.exit_code == 0
        assert "There were 1 corners entered, but there should be 8." in str(
            result.output,
        )
def test_switch_config_bad_switch_name_1():
    """Test that the `canu generate switch config` command fails on bad switch name."""
    # A name whose switch type cannot be derived.
    bad_name_1 = "sw-bad"
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--sls-file",
        sls_file,
        "--name",
        bad_name_1,
    ]
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_json:
            json.dump(sls_input, sls_json)
        result = runner.invoke(cli, generate_args)
        assert result.exit_code == 1
        print(result.output)
        expected_error = (
            f"For switch {bad_name_1}, the type cannot be determined. "
            "Please check the switch name and try again."
        )
        assert expected_error in str(result.output)
def test_switch_config_bad_switch_name_2():
    """Test that the `canu generate switch config` command fails on bad switch name."""
    # A well-formed name with a number that does not exist in the SHCD.
    bad_name_2 = "sw-spine-999"
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--sls-file",
        sls_file,
        "--name",
        bad_name_2,
    ]
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_json:
            json.dump(sls_input, sls_json)
        result = runner.invoke(cli, generate_args)
        assert result.exit_code == 1
        print(result.output)
        expected_error = (
            f"For switch {bad_name_2}, the type cannot be determined. "
            "Please check the switch name and try again."
        )
        assert expected_error in str(result.output)
def test_switch_config_non_switch():
    """Test that the `canu generate switch config` command fails on non switch."""
    # A worker NCN hostname — valid node, but not a switch.
    non_switch = "ncn-w001"
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--sls-file",
        sls_file,
        "--name",
        non_switch,
    ]
    with runner.isolated_filesystem():
        with open(sls_file, "w") as sls_json:
            json.dump(sls_input, sls_json)
        result = runner.invoke(cli, generate_args)
        assert result.exit_code == 1
        print(result.output)
        expected_error = (
            f"{non_switch} is not a switch. Only switch config can be generated."
        )
        assert expected_error in str(result.output)
@responses.activate
def test_switch_config_sls():
    """Test that the `canu generate switch config` command runs with SLS."""
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--name",
        switch_name,
    ]
    with runner.isolated_filesystem():
        # Serve the SLS networks payload from the mocked API gateway.
        responses.add(
            responses.GET,
            f"https://{sls_address}/apis/sls/v1/networks",
            json=sls_networks,
        )
        result = runner.invoke(cli, generate_args)
        assert result.exit_code == 0
        output = str(result.output)
        for expected in (
            "hostname sw-spine-001",
            "ntp server 192.168.4.4",
            "ntp server 192.168.4.5",
            "ntp server 192.168.4.6",
            "deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0",
            "interface 1/1/30",
            "interface 1/1/31",
            "interface 1/1/32",
        ):
            assert expected in output
@responses.activate
def test_switch_config_sls_token_bad():
    """Test that the `canu generate switch config` command errors on bad token file."""
    bad_token = "bad_token.token"
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--name",
        switch_name,
        "--auth-token",
        bad_token,
    ]
    with runner.isolated_filesystem():
        # Write a syntactically valid but unauthorized token file.
        with open(bad_token, "w") as token_file:
            token_file.write('{"access_token": "123"}')
        # Have the mocked SLS endpoint raise an HTTP error on contact.
        responses.add(
            responses.GET,
            f"https://{sls_address}/apis/sls/v1/networks",
            body=requests.exceptions.HTTPError(
                "503 Server Error: Service Unavailable for url",
            ),
        )
        result = runner.invoke(cli, generate_args)
        assert result.exit_code == 0
        print(result.output)
        expected_error = (
            "Error connecting SLS api-gw-service-nmn.local, "
            "check that the token is valid, or generate a new one"
        )
        assert expected_error in str(result.output)
@responses.activate
def test_switch_config_sls_token_missing():
    """Test that the `canu generate switch config` command errors on no token file."""
    # This token file is never created, so reading it must fail.
    bad_token = "no_token.token"
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--name",
        switch_name,
        "--auth-token",
        bad_token,
    ]
    result = runner.invoke(cli, generate_args)
    assert result.exit_code == 0
    expected_error = "Invalid token file, generate another token or try again."
    assert expected_error in str(result.output)
@responses.activate
def test_switch_config_sls_address_bad():
    """Test that the `canu generate switch config` command errors with bad SLS address."""
    bad_sls_address = "192.168.254.254"
    # Simulate an unreachable SLS host at the bad address.
    responses.add(
        responses.GET,
        f"https://{bad_sls_address}/apis/sls/v1/networks",
        body=requests.exceptions.ConnectionError(
            "Failed to establish a new connection: [Errno 51] Network is unreachable",
        ),
    )
    generate_args = [
        "--cache",
        cache_minutes,
        "generate",
        "switch",
        "config",
        "--csm",
        csm,
        "--architecture",
        architecture,
        "--shcd",
        test_file,
        "--tabs",
        tabs,
        "--corners",
        corners,
        "--name",
        switch_name,
        "--sls-address",
        bad_sls_address,
    ]
    result = runner.invoke(cli, generate_args)
    assert result.exit_code == 0
    expected_error = (
        "Error connecting to SLS 192.168.254.254, "
        "check the address or pass in a new address using --sls-address."
    )
    assert expected_error in str(result.output)
# TDS Tests
def test_switch_config_tds_spine_primary():
    """Test that the `canu generate switch config` command runs and returns valid TDS primary spine config.

    Generates the config from the TDS SHCD/SLS fixtures and checks each major
    section of the rendered switch config (ACLs, VLANs, NCN/UAN/leaf/CDU
    interfaces, VSX, route-maps, OSPF/BGP) appears verbatim in the output.
    """
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture_tds,
                "--shcd",
                test_file_tds,
                "--tabs",
                tabs_tds,
                "--corners",
                corners_tds,
                "--sls-file",
                sls_file,
                "--name",
                switch_name,
            ],
        )
        assert result.exit_code == 0
        # Bug fix: the original `assert "hostname sw-spine-001\n"` only tested
        # the truthiness of a non-empty literal (always true) and never
        # inspected the output; check the generated config instead.
        assert "hostname sw-spine-001\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        # Base config: VRFs and NTP servers.
        assert (
            "no ip icmp redirect\n"
            + "vrf Customer\n"
            + "vrf keepalive\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        print(result.output)
        # SSH servers, mgmt/nmn-hmn/cmn-can ACLs, VLANs, and spanning tree.
        assert (
            "ssh server vrf Customer\n"
            + "ssh server vrf default\n"
            + "ssh server vrf keepalive\n"
            + "ssh server vrf mgmt\n"
            + "access-list ip mgmt\n"
            + "  10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + "  20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + "  30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + "  40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + "  50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + "  60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + "  70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + "  80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + "  90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + "  100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + "  110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + "  120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + "  130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + "  140 deny tcp any any eq ssh\n"
            + "  150 deny tcp any any eq https\n"
            + "  160 deny udp any any eq snmp\n"
            + "  170 deny udp any any eq snmp-trap\n"
            + "  180 comment ALLOW ANYTHING ELSE\n"
            + "  190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + "  10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + "  60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + "  70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + "  80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + "  90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + "  100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + "  110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + "  120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + "  130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + "  10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + "  20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + "  40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + "  50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + "    name NMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + "    name HMN\n"
            + "    apply access-list ip nmn-hmn in\n"
            + "    apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + "    name CMN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 7\n"
            + "    name CAN\n"
            + "    apply access-list ip cmn-can in\n"
            + "    apply access-list ip cmn-can out\n"
            + "vlan 10\n"
            + "    name SUN\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree priority 0\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + "    shutdown\n"
            + "    ip dhcp\n"
        ) in str(result.output)
        # Master NCN LAG/interface pairs (lags 1-3).
        ncn_m = (
            "interface lag 1 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m001:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/1\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m001:ocp:1<==sw-spine-001\n"
            + "    lag 1\n"
            + "\n"
            + "interface lag 2 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m002:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/2\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m002:ocp:1<==sw-spine-001\n"
            + "    lag 2\n"
            + "\n"
            + "interface lag 3 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-m003:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/3\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-m003:ocp:1<==sw-spine-001\n"
            + "    lag 3\n"
        )
        assert ncn_m in str(result.output)
        # Worker NCN LAG/interface pairs (lags 4-6).
        ncn_w = (
            "interface lag 4 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-w001:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/4\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w001:ocp:1<==sw-spine-001\n"
            + "    lag 4\n"
            + "\n"
            + "interface lag 5 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-w002:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/5\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w002:ocp:1<==sw-spine-001\n"
            + "    lag 5\n"
            + "\n"
            + "interface lag 6 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-w003:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/6\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-w003:ocp:1<==sw-spine-001\n"
            + "    lag 6\n"
        )
        assert ncn_w in str(result.output)
        # Storage NCN LAG/interface pairs (lags 7-12, SUN VLAN on second port).
        ncn_s = (
            "interface lag 7 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s001:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/7\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:ocp:1<==sw-spine-001\n"
            + "    lag 7\n"
            + "\n"
            + "interface lag 8 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s001:ocp:2<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/8\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s001:ocp:2<==sw-spine-001\n"
            + "    lag 8\n"
            + "\n"
            + "interface lag 9 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s002:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/9\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:ocp:1<==sw-spine-001\n"
            + "    lag 9\n"
            + "\n"
            + "interface lag 10 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s002:ocp:2<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/10\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s002:ocp:2<==sw-spine-001\n"
            + "    lag 10\n"
            + "\n"
            + "interface lag 11 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s003:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6-7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/11\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s003:ocp:1<==sw-spine-001\n"
            + "    lag 11\n"
            + "\n"
            + "interface lag 12 multi-chassis\n"
            + "    no shutdown\n"
            + "    description ncn-s003:ocp:2<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 10\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/12\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description ncn-s003:ocp:2<==sw-spine-001\n"
            + "    lag 12\n"
        )
        assert ncn_s in str(result.output)
        # UAN interfaces: NMN access port plus CAN trunk LAG.
        uan = (
            "interface 1/1/13\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description uan001:ocp:1<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan access 2\n"
            + "    spanning-tree bpdu-guard\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface lag 14 multi-chassis\n"
            + "    no shutdown\n"
            + "    description uan001:ocp:2<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 7\n"
            + "    lacp mode active\n"
            + "    lacp fallback\n"
            + "    spanning-tree port-type admin-edge\n"
            + "\n"
            + "interface 1/1/14\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description uan001:ocp:2<==sw-spine-001\n"
            + "    lag 14\n"
        )
        assert uan in str(result.output)
        # Downlink to leaf-bmc switch.
        sw_spine_to_leaf_bmc = (
            "interface lag 151 multi-chassis\n"
            + "    no shutdown\n"
            + "    description sw-leaf-bmc-001:48<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "\n"
            + "interface 1/1/51\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-leaf-bmc-001:48<==sw-spine-001\n"
            + "    lag 151\n"
        )
        assert sw_spine_to_leaf_bmc in str(result.output)
        # Downlinks to CDU switches (shared lag 201).
        spine_to_cdu = (
            "interface 1/1/49\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-cdu-002:50<==sw-spine-001\n"
            + "    lag 201\n"
            + "\n"
            + "interface lag 201 multi-chassis\n"
            + "    no shutdown\n"
            + "    description sw-cdu-001:50<==sw-spine-001\n"
            + "    no routing\n"
            + "    vlan trunk native 1\n"
            + "    vlan trunk allowed 1-2,4,6\n"
            + "    lacp mode active\n"
            + "    spanning-tree root-guard\n"
            + "\n"
            + "interface 1/1/50\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description sw-cdu-001:50<==sw-spine-001\n"
            + "    lag 201\n"
        )
        assert spine_to_cdu in str(result.output)
        print(result.output)
        # VSX ISL/keepalive, loopback, and VLAN SVIs.
        assert (
            "interface lag 256\n"
            + "    no shutdown\n"
            + "    description ISL link\n"
            + "    no routing\n"
            + "    vlan trunk native 1 tag\n"
            + "    vlan trunk allowed all\n"
            + "    lacp mode active\n"
            + "interface 1/1/54\n"
            + "    no shutdown\n"
            + "    vrf attach keepalive\n"
            + "    description VSX keepalive\n"
            + "    ip address 192.168.255.0/31\n"
            + "interface 1/1/55\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "interface 1/1/56\n"
            + "    no shutdown\n"
            + "    mtu 9198\n"
            + "    description vsx isl\n"
            + "    lag 256\n"
            + "vsx\n"
            + "    system-mac 02:00:00:00:01:00\n"
            + "    inter-switch-link lag 256\n"
            + "    role primary\n"
            + "    keepalive peer 192.168.255.1 source 192.168.255.0 vrf keepalive\n"
            + "    linkup-delay-timer 600\n"
            + "    vsx-sync vsx-global\n"
            + "\n"
            + "interface loopback 0\n"
            + "    ip address 10.2.0.2/32\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.1.2/16\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.1.1\n"
            + "    ip helper-address 10.92.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 2\n"
            + "    description NMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.3.2/17\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.3.1\n"
            + "    ip helper-address 10.92.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + "    description HMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.0.2/17\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.0.1\n"
            + "    ip helper-address 10.94.100.222\n"
            + "    ip ospf 1 area 0.0.0.0\n"
            + "    ip ospf passive\n"
            + "interface vlan 6\n"
            + "    vrf attach Customer\n"
            + "    description CMN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.12.2/24\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.12.1\n"
            + "    ip ospf 2 area 0.0.0.0\n"
            + "interface vlan 7\n"
            + "    vrf attach Customer\n"
            + "    description CAN\n"
            + "    ip mtu 9198\n"
            + "    ip address 192.168.11.2/24\n"
            + "    active-gateway ip mac 12:00:00:00:6b:00\n"
            + "    active-gateway ip 192.168.11.1\n"
            + "    ip ospf 2 area 0.0.0.0\n"
            + "ip dns server-address 10.92.100.225\n"
        ) in str(result.output)
        print(result.output)
        # Prefix lists used by the route maps below.
        assert (
            "ip prefix-list pl-cmn seq 10 permit 192.168.12.0/24 ge 24\n"
            + "ip prefix-list pl-can seq 20 permit 192.168.11.0/24 ge 24\n"
            + "ip prefix-list pl-hmn seq 30 permit 10.94.100.0/24 ge 24\n"
            + "ip prefix-list pl-nmn seq 40 permit 10.92.100.0/24 ge 24\n"
            + "ip prefix-list tftp seq 10 permit 10.92.100.60/32 ge 32 le 32\n"
            + "ip prefix-list tftp seq 20 permit 10.94.100.60/32 ge 32 le 32\n"
        ) in str(result.output)
        print(result.output)
        # Per-worker route maps (default and Customer VRF).
        assert (
            "route-map ncn-w001 permit seq 10\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.4\n"
            + "     set local-preference 1000\n"
            + "route-map ncn-w001 permit seq 20\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.5\n"
            + "     set local-preference 1100\n"
            + "route-map ncn-w001 permit seq 30\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.6\n"
            + "     set local-preference 1200\n"
            + "route-map ncn-w001 permit seq 40\n"
            + "     match ip address prefix-list pl-hmn\n"
            + "     set ip next-hop 192.168.0.4\n"
            + "route-map ncn-w001 permit seq 50\n"
            + "     match ip address prefix-list pl-nmn\n"
            + "     set ip next-hop 192.168.4.4\n"
            + "\n"
            + "route-map ncn-w001-Customer permit seq 10\n"
            + "     match ip address prefix-list pl-can\n"
            + "     set ip next-hop 192.168.11.4\n"
            + "route-map ncn-w001-Customer permit seq 20\n"
            + "     match ip address prefix-list pl-cmn\n"
            + "\n"
            + "\n"
            + "route-map ncn-w002 permit seq 10\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.4\n"
            + "     set local-preference 1000\n"
            + "route-map ncn-w002 permit seq 20\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.5\n"
            + "     set local-preference 1100\n"
            + "route-map ncn-w002 permit seq 30\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.6\n"
            + "     set local-preference 1200\n"
            + "route-map ncn-w002 permit seq 40\n"
            + "     match ip address prefix-list pl-hmn\n"
            + "     set ip next-hop 192.168.0.5\n"
            + "route-map ncn-w002 permit seq 50\n"
            + "     match ip address prefix-list pl-nmn\n"
            + "     set ip next-hop 192.168.4.5\n"
            + "\n"
            + "route-map ncn-w002-Customer permit seq 10\n"
            + "     match ip address prefix-list pl-can\n"
            + "     set ip next-hop 192.168.11.5\n"
            + "route-map ncn-w002-Customer permit seq 20\n"
            + "     match ip address prefix-list pl-cmn\n"
            + "\n"
            + "\n"
            + "route-map ncn-w003 permit seq 10\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.4\n"
            + "     set local-preference 1000\n"
            + "route-map ncn-w003 permit seq 20\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.5\n"
            + "     set local-preference 1100\n"
            + "route-map ncn-w003 permit seq 30\n"
            + "     match ip address prefix-list tftp\n"
            + "     match ip next-hop 192.168.4.6\n"
            + "     set local-preference 1200\n"
            + "route-map ncn-w003 permit seq 40\n"
            + "     match ip address prefix-list pl-hmn\n"
            + "     set ip next-hop 192.168.0.6\n"
            + "route-map ncn-w003 permit seq 50\n"
            + "     match ip address prefix-list pl-nmn\n"
            + "     set ip next-hop 192.168.4.6\n"
            + "\n"
            + "route-map ncn-w003-Customer permit seq 10\n"
            + "     match ip address prefix-list pl-can\n"
            + "     set ip next-hop 192.168.11.6\n"
            + "route-map ncn-w003-Customer permit seq 20\n"
            + "     match ip address prefix-list pl-cmn\n"
        ) in str(result.output)
        print(result.output)
        # OSPF and BGP config for default and Customer VRFs.
        assert (
            "router ospf 2 vrf Customer\n"
            + "    router-id 10.2.0.2\n"
            + "    default-information originate\n"
            + "    area 0.0.0.0\n"
            + "router ospf 1\n"
            + "    router-id 10.2.0.2\n"
            + "    redistribute bgp\n"
            + "    area 0.0.0.0\n"
            + "\n"
            + "router bgp 65533\n"
            + "    bgp router-id 10.2.0.2\n"
            + "    maximum-paths 8\n"
            + "    timers bgp 1 3\n"
            + "    distance bgp 20 70\n"
            + "    neighbor 192.168.3.3 remote-as 65533\n"
            + "    neighbor 192.168.4.4 remote-as 65531\n"
            + "    neighbor 192.168.4.4 passive\n"
            + "    neighbor 192.168.4.5 remote-as 65531\n"
            + "    neighbor 192.168.4.5 passive\n"
            + "    neighbor 192.168.4.6 remote-as 65531\n"
            + "    neighbor 192.168.4.6 passive\n"
            + "    address-family ipv4 unicast\n"
            + "        neighbor 192.168.3.3 activate\n"
            + "        neighbor 192.168.4.4 activate\n"
            + "        neighbor 192.168.4.4 route-map ncn-w001 in\n"
            + "        neighbor 192.168.4.5 activate\n"
            + "        neighbor 192.168.4.5 route-map ncn-w002 in\n"
            + "        neighbor 192.168.4.6 activate\n"
            + "        neighbor 192.168.4.6 route-map ncn-w003 in\n"
            + "    exit-address-family\n"
            + "    vrf Customer\n"
            + "        bgp router-id 10.2.0.2\n"
            + "        maximum-paths 8\n"
            + "        timers bgp 1 3\n"
            + "        distance bgp 20 70\n"
            + "        neighbor 192.168.12.3 remote-as 65533\n"
            + "        neighbor 192.168.12.4 remote-as 65532\n"
            + "        neighbor 192.168.12.4 passive\n"
            + "        neighbor 192.168.12.5 remote-as 65532\n"
            + "        neighbor 192.168.12.5 passive\n"
            + "        neighbor 192.168.12.6 remote-as 65532\n"
            + "        neighbor 192.168.12.6 passive\n"
            + "        address-family ipv4 unicast\n"
            + "            neighbor 192.168.12.3 activate\n"
            + "            neighbor 192.168.12.4 activate\n"
            + "            neighbor 192.168.12.4 route-map ncn-w001-Customer in\n"
            + "            neighbor 192.168.12.5 activate\n"
            + "            neighbor 192.168.12.5 route-map ncn-w002-Customer in\n"
            + "            neighbor 192.168.12.6 activate\n"
            + "            neighbor 192.168.12.6 route-map ncn-w003-Customer in\n"
            + "        exit-address-family\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
def test_switch_config_tds_spine_secondary():
"""Test that the `canu generate switch config` command runs and returns valid TDS secondary spine config."""
spine_secondary = "sw-spine-002"
with runner.isolated_filesystem():
with open(sls_file, "w") as f:
json.dump(sls_input, f)
result = runner.invoke(
cli,
[
"--cache",
cache_minutes,
"generate",
"switch",
"config",
"--csm",
csm,
"--architecture",
architecture_tds,
"--shcd",
test_file_tds,
"--tabs",
tabs_tds,
"--corners",
corners_tds,
"--sls-file",
sls_file,
"--name",
spine_secondary,
],
)
assert result.exit_code == 0
assert "hostname sw-spine-002\n"
assert banner_motd in str(result.output)
print(result.output)
assert (
"no ip icmp redirect\n"
+ "vrf Customer\n"
+ "vrf keepalive\n"
+ "ntp server 192.168.4.4\n"
+ "ntp server 192.168.4.5\n"
+ "ntp server 192.168.4.6\n"
+ "ntp enable\n"
) in str(result.output)
print(result.output)
assert (
"ssh server vrf Customer\n"
+ "ssh server vrf default\n"
+ "ssh server vrf keepalive\n"
+ "ssh server vrf mgmt\n"
+ "access-list ip mgmt\n"
+ " 10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
+ " 20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
+ " 30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
+ " 40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
+ " 50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
+ " 60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
+ " 70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
+ " 80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
+ " 90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
+ " 100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
+ " 110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
+ " 120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
+ " 130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
+ " 140 deny tcp any any eq ssh\n"
+ " 150 deny tcp any any eq https\n"
+ " 160 deny udp any any eq snmp\n"
+ " 170 deny udp any any eq snmp-trap\n"
+ " 180 comment ALLOW ANYTHING ELSE\n"
+ " 190 permit any any any\n"
+ "access-list ip nmn-hmn\n"
+ " 10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
+ " 20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
+ " 30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
+ " 40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
+ " 50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
+ " 60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
+ " 70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
+ " 80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
+ " 90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
+ " 100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
+ " 110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
+ " 120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
+ " 130 permit any any any\n"
+ "access-list ip cmn-can\n"
+ " 10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
+ " 20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
+ " 30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
+ " 40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
+ " 50 permit any any any\n"
+ "apply access-list ip mgmt control-plane vrf default\n"
+ "apply access-list ip mgmt control-plane vrf Customer\n"
+ "\n"
+ "vlan 1\n"
+ "vlan 2\n"
+ " name NMN\n"
+ " apply access-list ip nmn-hmn in\n"
+ " apply access-list ip nmn-hmn out\n"
+ "vlan 4\n"
+ " name HMN\n"
+ " apply access-list ip nmn-hmn in\n"
+ " apply access-list ip nmn-hmn out\n"
+ "vlan 6\n"
+ " name CMN\n"
+ " apply access-list ip cmn-can in\n"
+ " apply access-list ip cmn-can out\n"
+ "vlan 7\n"
+ " name CAN\n"
+ " apply access-list ip cmn-can in\n"
+ " apply access-list ip cmn-can out\n"
+ "vlan 10\n"
+ " name SUN\n"
+ "spanning-tree\n"
+ "spanning-tree forward-delay 4\n"
+ "spanning-tree priority 0\n"
+ "spanning-tree config-name MST0\n"
+ "spanning-tree config-revision 1\n"
+ "interface mgmt\n"
+ " shutdown\n"
+ " ip dhcp\n"
) in str(result.output)
ncn_m = (
"interface lag 1 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-m001:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/1\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-m001:pcie-slot1:1<==sw-spine-002\n"
+ " lag 1\n"
+ "\n"
+ "interface lag 2 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-m002:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/2\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-m002:pcie-slot1:1<==sw-spine-002\n"
+ " lag 2\n"
+ "\n"
+ "interface lag 3 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-m003:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/3\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-m003:pcie-slot1:1<==sw-spine-002\n"
+ " lag 3\n"
)
assert ncn_m in str(result.output)
ncn_w = (
"interface lag 4 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-w001:ocp:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/4\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-w001:ocp:2<==sw-spine-002\n"
+ " lag 4\n"
+ "\n"
+ "interface lag 5 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-w002:ocp:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/5\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-w002:ocp:2<==sw-spine-002\n"
+ " lag 5\n"
+ "\n"
+ "interface lag 6 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-w003:ocp:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/6\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-w003:ocp:2<==sw-spine-002\n"
+ " lag 6\n"
)
assert ncn_w in str(result.output)
ncn_s = (
"interface lag 7 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-s001:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/7\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-s001:pcie-slot1:1<==sw-spine-002\n"
+ " lag 7\n"
+ "\n"
+ "interface lag 8 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-s001:pcie-slot1:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 10\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/8\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-s001:pcie-slot1:2<==sw-spine-002\n"
+ " lag 8\n"
+ "\n"
+ "interface lag 9 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-s002:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/9\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-s002:pcie-slot1:1<==sw-spine-002\n"
+ " lag 9\n"
+ "\n"
+ "interface lag 10 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-s002:pcie-slot1:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 10\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/10\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-s002:pcie-slot1:2<==sw-spine-002\n"
+ " lag 10\n"
+ "\n"
+ "interface lag 11 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-s003:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6-7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/11\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-s003:pcie-slot1:1<==sw-spine-002\n"
+ " lag 11\n"
+ "\n"
+ "interface lag 12 multi-chassis\n"
+ " no shutdown\n"
+ " description ncn-s003:pcie-slot1:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 10\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/12\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description ncn-s003:pcie-slot1:2<==sw-spine-002\n"
+ " lag 12\n"
)
assert ncn_s in str(result.output)
uan = (
"interface 1/1/13\n"
+ " mtu 9198\n"
+ " description uan001:pcie-slot1:1<==sw-spine-002\n"
+ " no routing\n"
+ " vlan access 2\n"
+ " spanning-tree bpdu-guard\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface lag 14 multi-chassis\n"
+ " no shutdown\n"
+ " description uan001:pcie-slot1:2<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 7\n"
+ " lacp mode active\n"
+ " lacp fallback\n"
+ " spanning-tree port-type admin-edge\n"
+ "\n"
+ "interface 1/1/14\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description uan001:pcie-slot1:2<==sw-spine-002\n"
+ " lag 14\n"
)
assert uan in str(result.output)
sw_spine_to_leaf_bmc = (
"interface lag 151 multi-chassis\n"
+ " no shutdown\n"
+ " description sw-leaf-bmc-001:47<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6\n"
+ " lacp mode active\n"
+ "\n"
+ "interface 1/1/51\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-leaf-bmc-001:47<==sw-spine-002\n"
+ " lag 151\n"
)
assert sw_spine_to_leaf_bmc in str(result.output)
spine_to_cdu = (
"interface 1/1/49\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-cdu-002:49<==sw-spine-002\n"
+ " lag 201\n"
+ "\n"
+ "interface lag 201 multi-chassis\n"
+ " no shutdown\n"
+ " description sw-cdu-001:49<==sw-spine-002\n"
+ " no routing\n"
+ " vlan trunk native 1\n"
+ " vlan trunk allowed 1-2,4,6\n"
+ " lacp mode active\n"
+ " spanning-tree root-guard\n"
+ "\n"
+ "interface 1/1/50\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description sw-cdu-001:49<==sw-spine-002\n"
+ " lag 201\n"
)
assert spine_to_cdu in str(result.output)
print(result.output)
assert (
"interface lag 256\n"
+ " no shutdown\n"
+ " description ISL link\n"
+ " no routing\n"
+ " vlan trunk native 1 tag\n"
+ " vlan trunk allowed all\n"
+ " lacp mode active\n"
+ "interface 1/1/54\n"
+ " no shutdown\n"
+ " vrf attach keepalive\n"
+ " description VSX keepalive\n"
+ " ip address 192.168.255.1/31\n"
+ "interface 1/1/55\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description vsx isl\n"
+ " lag 256\n"
+ "interface 1/1/56\n"
+ " no shutdown\n"
+ " mtu 9198\n"
+ " description vsx isl\n"
+ " lag 256\n"
+ "vsx\n"
+ " system-mac 02:00:00:00:01:00\n"
+ " inter-switch-link lag 256\n"
+ " role secondary\n"
+ " keepalive peer 192.168.255.0 source 192.168.255.1 vrf keepalive\n"
+ " linkup-delay-timer 600\n"
+ " vsx-sync vsx-global\n"
+ "\n"
+ "interface loopback 0\n"
+ " ip address 10.2.0.3/32\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ "interface vlan 1\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.1.3/16\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.1.1\n"
+ " ip helper-address 10.92.100.222\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ " ip ospf passive\n"
+ "interface vlan 2\n"
+ " description NMN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.3.3/17\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.3.1\n"
+ " ip helper-address 10.92.100.222\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ "interface vlan 4\n"
+ " description HMN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.0.3/17\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.0.1\n"
+ " ip helper-address 10.94.100.222\n"
+ " ip ospf 1 area 0.0.0.0\n"
+ " ip ospf passive\n"
+ "interface vlan 6\n"
+ " vrf attach Customer\n"
+ " description CMN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.12.3/24\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.12.1\n"
+ " ip ospf 2 area 0.0.0.0\n"
+ "interface vlan 7\n"
+ " vrf attach Customer\n"
+ " description CAN\n"
+ " ip mtu 9198\n"
+ " ip address 192.168.11.3/24\n"
+ " active-gateway ip mac 12:00:00:00:6b:00\n"
+ " active-gateway ip 192.168.11.1\n"
+ " ip ospf 2 area 0.0.0.0\n"
+ "ip dns server-address 10.92.100.225\n"
) in str(result.output)
print(result.output)
assert (
"ip prefix-list pl-cmn seq 10 permit 192.168.12.0/24 ge 24\n"
+ "ip prefix-list pl-can seq 20 permit 192.168.11.0/24 ge 24\n"
+ "ip prefix-list pl-hmn seq 30 permit 10.94.100.0/24 ge 24\n"
+ "ip prefix-list pl-nmn seq 40 permit 10.92.100.0/24 ge 24\n"
+ "ip prefix-list tftp seq 10 permit 10.92.100.60/32 ge 32 le 32\n"
+ "ip prefix-list tftp seq 20 permit 10.94.100.60/32 ge 32 le 32\n"
) in str(result.output)
print(result.output)
assert (
"route-map ncn-w001 permit seq 10\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.4\n"
+ " set local-preference 1000\n"
+ "route-map ncn-w001 permit seq 20\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.5\n"
+ " set local-preference 1100\n"
+ "route-map ncn-w001 permit seq 30\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.6\n"
+ " set local-preference 1200\n"
+ "route-map ncn-w001 permit seq 40\n"
+ " match ip address prefix-list pl-hmn\n"
+ " set ip next-hop 192.168.0.4\n"
+ "route-map ncn-w001 permit seq 50\n"
+ " match ip address prefix-list pl-nmn\n"
+ " set ip next-hop 192.168.4.4\n"
+ "\n"
+ "route-map ncn-w001-Customer permit seq 10\n"
+ " match ip address prefix-list pl-can\n"
+ " set ip next-hop 192.168.11.4\n"
+ "route-map ncn-w001-Customer permit seq 20\n"
+ " match ip address prefix-list pl-cmn\n"
+ "\n"
+ "\n"
+ "route-map ncn-w002 permit seq 10\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.4\n"
+ " set local-preference 1000\n"
+ "route-map ncn-w002 permit seq 20\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.5\n"
+ " set local-preference 1100\n"
+ "route-map ncn-w002 permit seq 30\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.6\n"
+ " set local-preference 1200\n"
+ "route-map ncn-w002 permit seq 40\n"
+ " match ip address prefix-list pl-hmn\n"
+ " set ip next-hop 192.168.0.5\n"
+ "route-map ncn-w002 permit seq 50\n"
+ " match ip address prefix-list pl-nmn\n"
+ " set ip next-hop 192.168.4.5\n"
+ "\n"
+ "route-map ncn-w002-Customer permit seq 10\n"
+ " match ip address prefix-list pl-can\n"
+ " set ip next-hop 192.168.11.5\n"
+ "route-map ncn-w002-Customer permit seq 20\n"
+ " match ip address prefix-list pl-cmn\n"
+ "\n"
+ "\n"
+ "route-map ncn-w003 permit seq 10\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.4\n"
+ " set local-preference 1000\n"
+ "route-map ncn-w003 permit seq 20\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.5\n"
+ " set local-preference 1100\n"
+ "route-map ncn-w003 permit seq 30\n"
+ " match ip address prefix-list tftp\n"
+ " match ip next-hop 192.168.4.6\n"
+ " set local-preference 1200\n"
+ "route-map ncn-w003 permit seq 40\n"
+ " match ip address prefix-list pl-hmn\n"
+ " set ip next-hop 192.168.0.6\n"
+ "route-map ncn-w003 permit seq 50\n"
+ " match ip address prefix-list pl-nmn\n"
+ " set ip next-hop 192.168.4.6\n"
+ "\n"
+ "route-map ncn-w003-Customer permit seq 10\n"
+ " match ip address prefix-list pl-can\n"
+ " set ip next-hop 192.168.11.6\n"
+ "route-map ncn-w003-Customer permit seq 20\n"
+ " match ip address prefix-list pl-cmn\n"
) in str(result.output)
print(result.output)
assert (
"router ospf 2 vrf Customer\n"
+ " router-id 10.2.0.3\n"
+ " default-information originate\n"
+ " area 0.0.0.0\n"
+ "router ospf 1\n"
+ " router-id 10.2.0.3\n"
+ " redistribute bgp\n"
+ " area 0.0.0.0\n"
+ "\n"
+ "router bgp 65533\n"
+ " bgp router-id 10.2.0.3\n"
+ " maximum-paths 8\n"
+ " timers bgp 1 3\n"
+ " distance bgp 20 70\n"
+ " neighbor 192.168.3.2 remote-as 65533\n"
+ " neighbor 192.168.4.4 remote-as 65531\n"
+ " neighbor 192.168.4.4 passive\n"
+ " neighbor 192.168.4.5 remote-as 65531\n"
+ " neighbor 192.168.4.5 passive\n"
+ " neighbor 192.168.4.6 remote-as 65531\n"
+ " neighbor 192.168.4.6 passive\n"
+ " address-family ipv4 unicast\n"
+ " neighbor 192.168.3.2 activate\n"
+ " neighbor 192.168.4.4 activate\n"
+ " neighbor 192.168.4.4 route-map ncn-w001 in\n"
+ " neighbor 192.168.4.5 activate\n"
+ " neighbor 192.168.4.5 route-map ncn-w002 in\n"
+ " neighbor 192.168.4.6 activate\n"
+ " neighbor 192.168.4.6 route-map ncn-w003 in\n"
+ " exit-address-family\n"
+ " vrf Customer\n"
+ " bgp router-id 10.2.0.3\n"
+ " maximum-paths 8\n"
+ " timers bgp 1 3\n"
+ " distance bgp 20 70\n"
+ " neighbor 192.168.12.2 remote-as 65533\n"
+ " neighbor 192.168.12.4 remote-as 65532\n"
+ " neighbor 192.168.12.4 passive\n"
+ " neighbor 192.168.12.5 remote-as 65532\n"
+ " neighbor 192.168.12.5 passive\n"
+ " neighbor 192.168.12.6 remote-as 65532\n"
+ " neighbor 192.168.12.6 passive\n"
+ " address-family ipv4 unicast\n"
+ " neighbor 192.168.12.2 activate\n"
+ " neighbor 192.168.12.4 activate\n"
+ " neighbor 192.168.12.4 route-map ncn-w001-Customer in\n"
+ " neighbor 192.168.12.5 activate\n"
+ " neighbor 192.168.12.5 route-map ncn-w002-Customer in\n"
+ " neighbor 192.168.12.6 activate\n"
+ " neighbor 192.168.12.6 route-map ncn-w003-Customer in\n"
+ " exit-address-family\n"
+ "https-server vrf Customer\n"
+ "https-server vrf default\n"
+ "https-server vrf mgmt\n"
) in str(result.output)
def test_switch_config_tds_leaf_bmc():
    """Test that the `canu generate switch config` command runs and returns valid tds leaf-bmc config."""
    leaf_bmc_tds = "sw-leaf-bmc-001"
    with runner.isolated_filesystem():
        with open(sls_file, "w") as f:
            json.dump(sls_input, f)
        result = runner.invoke(
            cli,
            [
                "--cache",
                cache_minutes,
                "generate",
                "switch",
                "config",
                "--csm",
                csm,
                "--architecture",
                architecture_tds,
                "--shcd",
                test_file_tds,
                "--tabs",
                tabs_tds,
                "--corners",
                corners_tds,
                "--sls-file",
                sls_file,
                "--name",
                leaf_bmc_tds,
            ],
        )
        assert result.exit_code == 0
        # Bug fix: the original `assert "hostname sw-leaf-bmc-001\n"` asserted a
        # non-empty string literal (always truthy) and never inspected the
        # generated config; check membership in the CLI output instead.
        assert "hostname sw-leaf-bmc-001\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "no ip icmp redirect\n"
            + "vrf Customer\n"
            + "ntp server 192.168.4.4\n"
            + "ntp server 192.168.4.5\n"
            + "ntp server 192.168.4.6\n"
            + "ntp enable\n"
        ) in str(result.output)
        # Bug fix: same no-op assertion pattern as above — now a real check.
        assert "ssh server vrf default\n" in str(result.output)
        assert banner_motd in str(result.output)
        print(result.output)
        assert (
            "ssh server vrf mgmt\n"
            + "ssh server vrf Customer\n"
            + "access-list ip mgmt\n"
            + " 10 comment ALLOW SSH, HTTPS, AND SNMP ON HMN SUBNET and CMN\n"
            + " 20 permit tcp 192.168.0.0/255.255.128.0 any eq ssh\n"
            + " 30 permit tcp 192.168.0.0/255.255.128.0 any eq https\n"
            + " 40 permit udp 192.168.0.0/255.255.128.0 any eq snmp\n"
            + " 50 permit udp 192.168.0.0/255.255.128.0 any eq snmp-trap\n"
            + " 60 permit tcp 192.168.12.0/255.255.255.0 any eq ssh\n"
            + " 70 permit tcp 192.168.12.0/255.255.255.0 any eq https\n"
            + " 80 permit udp 192.168.12.0/255.255.255.0 any eq snmp\n"
            + " 90 permit udp 192.168.12.0/255.255.255.0 any eq snmp-trap\n"
            + " 100 comment ALLOW SNMP FROM HMN METALLB SUBNET\n"
            + " 110 permit udp 10.94.100.0/255.255.255.0 any eq snmp\n"
            + " 120 permit udp 10.94.100.0/255.255.255.0 any eq snmp-trap\n"
            + " 130 comment BLOCK SSH, HTTPS, AND SNMP FROM EVERYWHERE ELSE\n"
            + " 140 deny tcp any any eq ssh\n"
            + " 150 deny tcp any any eq https\n"
            + " 160 deny udp any any eq snmp\n"
            + " 170 deny udp any any eq snmp-trap\n"
            + " 180 comment ALLOW ANYTHING ELSE\n"
            + " 190 permit any any any\n"
            + "access-list ip nmn-hmn\n"
            + " 10 deny any 192.168.3.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + " 20 deny any 192.168.0.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + " 30 deny any 192.168.3.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + " 40 deny any 192.168.0.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + " 50 deny any 192.168.100.0/255.255.128.0 192.168.0.0/255.255.128.0\n"
            + " 60 deny any 192.168.100.0/255.255.128.0 192.168.200.0/255.255.128.0\n"
            + " 70 deny any 192.168.200.0/255.255.128.0 192.168.3.0/255.255.128.0\n"
            + " 80 deny any 192.168.200.0/255.255.128.0 192.168.100.0/255.255.128.0\n"
            + " 90 deny any 10.92.100.0/255.255.255.0 192.168.0.0/255.255.128.0\n"
            + " 100 deny any 10.94.100.0/255.255.255.0 192.168.3.0/255.255.128.0\n"
            + " 110 deny any 192.168.0.0/255.255.128.0 10.92.100.0/255.255.255.0\n"
            + " 120 deny any 192.168.3.0/255.255.128.0 10.94.100.0/255.255.255.0\n"
            + " 130 permit any any any\n"
            + "access-list ip cmn-can\n"
            + " 10 deny any 192.168.12.0/255.255.255.0 192.168.11.0/255.255.255.0\n"
            + " 20 deny any 192.168.11.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + " 30 deny any 192.168.12.0/255.255.255.0 192.168.200.0/255.255.255.0\n"
            + " 40 deny any 192.168.200.0/255.255.255.0 192.168.12.0/255.255.255.0\n"
            + " 50 permit any any any\n"
            + "apply access-list ip mgmt control-plane vrf default\n"
            + "apply access-list ip mgmt control-plane vrf Customer\n"
            + "\n"
            + "vlan 1\n"
            + "vlan 2\n"
            + " name NMN\n"
            + " apply access-list ip nmn-hmn in\n"
            + " apply access-list ip nmn-hmn out\n"
            + "vlan 4\n"
            + " name HMN\n"
            + " apply access-list ip nmn-hmn in\n"
            + " apply access-list ip nmn-hmn out\n"
            + "vlan 6\n"
            + " name CMN\n"
            + " apply access-list ip cmn-can in\n"
            + " apply access-list ip cmn-can out\n"
            + "\n"
            + "spanning-tree\n"
            + "spanning-tree forward-delay 4\n"
            + "spanning-tree config-name MST0\n"
            + "spanning-tree config-revision 1\n"
            + "interface mgmt\n"
            + " shutdown\n"
            + " ip dhcp\n"
        ) in str(result.output)
        # Uplink LAG from the leaf-bmc switch to both spines.
        leaf_bmc_to_leaf = (
            "interface lag 101\n"
            + " no shutdown\n"
            + " description leaf_bmc_to_spine_lag\n"
            + " no routing\n"
            + " vlan trunk native 1\n"
            + " vlan trunk allowed 1-2,4,6\n"
            + " lacp mode active\n"
            + "interface 1/1/47\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-spine-002:51<==sw-leaf-bmc-001\n"
            + " lag 101\n"
            + "interface 1/1/48\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description sw-spine-001:51<==sw-leaf-bmc-001\n"
            + " lag 101\n"
        )
        assert leaf_bmc_to_leaf in str(result.output)
        # One access port per node BMC, all on the HMN VLAN (4).
        bmc = (
            "interface 1/1/1\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-m001:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/2\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-m002:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/3\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-m003:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/4\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-w001:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/5\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-w002:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/6\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-w003:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/7\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-s001:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/8\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-s002:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/9\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description ncn-s003:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/10\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description uan001:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/11\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description cn001:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/12\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description cn002:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/13\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description cn003:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
            + "interface 1/1/14\n"
            + " no shutdown\n"
            + " mtu 9198\n"
            + " description cn004:bmc:1<==sw-leaf-bmc-001\n"
            + " no routing\n"
            + " vlan access 4\n"
            + " spanning-tree bpdu-guard\n"
            + " spanning-tree port-type admin-edge\n"
        )
        assert bmc in str(result.output)
        print(result.output)
        assert (
            "interface loopback 0\n"
            + " ip address 10.2.0.12/32\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 1\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.1.12/16\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + " ip ospf passive\n"
            + "interface vlan 2\n"
            + " description NMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.3.12/17\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + "interface vlan 4\n"
            + " description HMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.0.12/17\n"
            + " ip ospf 1 area 0.0.0.0\n"
            + " ip ospf passive\n"
            + "interface vlan 6\n"
            + " vrf attach Customer\n"
            + " description CMN\n"
            + " ip mtu 9198\n"
            + " ip address 192.168.12.12/24\n"
            + " ip ospf 2 area 0.0.0.0\n"
            + "snmp-server vrf default\n"
            + "ip dns server-address 10.92.100.225\n"
            + "router ospf 2 vrf Customer\n"
            + " router-id 10.2.0.12\n"
            + " area 0.0.0.0\n"
            + "router ospf 1\n"
            + " router-id 10.2.0.12\n"
            + " area 0.0.0.0\n"
            + "https-server vrf Customer\n"
            + "https-server vrf default\n"
            + "https-server vrf mgmt\n"
        ) in str(result.output)
# SLS fixture consumed by the generate-switch-config tests above: one entry
# per CSM management network, mirroring the shape of the SLS /v1/networks
# payload that `canu` ingests via --sls-file.
sls_input = {
    "Networks": {
        "CAN": {
            "Name": "CAN",
            "ExtraProperties": {
                "CIDR": "192.168.11.0/24",
                "Subnets": [
                    {
                        "Name": "bootstrap_dhcp",
                        "CIDR": "192.168.11.0/24",
                        "IPReservations": [
                            {"Name": "can-switch-1", "IPAddress": "192.168.11.2"},
                            {"Name": "can-switch-2", "IPAddress": "192.168.11.3"},
                        ],
                        "VlanID": 7,
                        "Gateway": "192.168.11.1",
                    },
                    {
                        "FullName": "CAN Bootstrap DHCP Subnet",
                        "CIDR": "192.168.11.0/24",
                        "IPReservations": [
                            {"Name": "ncn-w001", "IPAddress": "192.168.11.4"},
                            {"Name": "ncn-w002", "IPAddress": "192.168.11.5"},
                            {"Name": "ncn-w003", "IPAddress": "192.168.11.6"},
                        ],
                        "Name": "bootstrap_dhcp",
                        "VlanID": 7,
                        "Gateway": "192.168.11.1",
                    },
                ],
            },
        },
        "CHN": {
            "Name": "CHN",
            "ExtraProperties": {
                "CIDR": "192.168.200.0/24",
                "MyASN": 65530,
                "PeerASN": 65533,
                "Subnets": [
                    {
                        "Name": "bootstrap_dhcp",
                        "CIDR": "192.168.200.0/24",
                        "IPReservations": [
                            {"Name": "chn-switch-1", "IPAddress": "192.168.200.2"},
                            {"Name": "chn-switch-2", "IPAddress": "192.168.200.3"},
                        ],
                        "VlanID": 5,
                        "Gateway": "192.168.200.1",
                    },
                    {
                        "FullName": "CHN Bootstrap DHCP Subnet",
                        "CIDR": "192.168.200.0/24",
                        "IPReservations": [
                            {"Name": "ncn-w001", "IPAddress": "192.168.200.4"},
                            {"Name": "ncn-w002", "IPAddress": "192.168.200.5"},
                            {"Name": "ncn-w003", "IPAddress": "192.168.200.6"},
                        ],
                        "Name": "bootstrap_dhcp",
                        "VlanID": 5,
                        "Gateway": "192.168.200.1",
                    },
                ],
            },
        },
        "CMN": {
            "Name": "CMN",
            "ExtraProperties": {
                "CIDR": "192.168.12.0/24",
                "MyASN": 65532,
                "PeerASN": 65533,
                "Subnets": [
                    {
                        "Name": "network_hardware",
                        "CIDR": "192.168.12.0/24",
                        "IPReservations": [
                            {"Name": "sw-spine-001", "IPAddress": "192.168.12.2"},
                            {"Name": "sw-spine-002", "IPAddress": "192.168.12.3"},
                            {"Name": "sw-leaf-001", "IPAddress": "192.168.12.4"},
                            {"Name": "sw-leaf-002", "IPAddress": "192.168.12.5"},
                            {"Name": "sw-leaf-003", "IPAddress": "192.168.12.6"},
                            {"Name": "sw-leaf-004", "IPAddress": "192.168.12.7"},
                            {"Name": "sw-leaf-bmc-001", "IPAddress": "192.168.12.12"},
                            {"Name": "sw-leaf-bmc-002", "IPAddress": "192.168.12.13"},
                            {"Name": "sw-leaf-bmc-003", "IPAddress": "192.168.12.14"},
                            {"Name": "sw-leaf-bmc-004", "IPAddress": "192.168.12.15"},
                            {"Name": "sw-cdu-001", "IPAddress": "192.168.12.16"},
                            {"Name": "sw-cdu-002", "IPAddress": "192.168.12.17"},
                        ],
                        "VlanID": 6,
                        "Gateway": "192.168.12.1",
                    },
                    {
                        "FullName": "CMN Bootstrap DHCP Subnet",
                        "CIDR": "192.168.12.0/24",
                        "IPReservations": [
                            {"Name": "ncn-w001", "IPAddress": "192.168.12.4"},
                            {"Name": "ncn-w002", "IPAddress": "192.168.12.5"},
                            {"Name": "ncn-w003", "IPAddress": "192.168.12.6"},
                        ],
                        "Name": "bootstrap_dhcp",
                        "VlanID": 6,
                        "Gateway": "192.168.12.1",
                    },
                ],
            },
        },
        "HMN": {
            "Name": "HMN",
            "ExtraProperties": {
                "CIDR": "192.168.0.0/17",
                "Subnets": [
                    {
                        "Name": "network_hardware",
                        "CIDR": "192.168.0.0/17",
                        "IPReservations": [
                            {"Name": "sw-spine-001", "IPAddress": "192.168.0.2"},
                            {"Name": "sw-spine-002", "IPAddress": "192.168.0.3"},
                            {"Name": "sw-leaf-001", "IPAddress": "192.168.0.4"},
                            {"Name": "sw-leaf-002", "IPAddress": "192.168.0.5"},
                            {"Name": "sw-leaf-003", "IPAddress": "192.168.0.6"},
                            {"Name": "sw-leaf-004", "IPAddress": "192.168.0.7"},
                            {"Name": "sw-leaf-bmc-001", "IPAddress": "192.168.0.12"},
                            {"Name": "sw-leaf-bmc-002", "IPAddress": "192.168.0.13"},
                            {"Name": "sw-leaf-bmc-003", "IPAddress": "192.168.0.14"},
                            {"Name": "sw-leaf-bmc-004", "IPAddress": "192.168.0.15"},
                            {"Name": "sw-cdu-001", "IPAddress": "192.168.0.16"},
                            {"Name": "sw-cdu-002", "IPAddress": "192.168.0.17"},
                        ],
                        "VlanID": 4,
                        "Gateway": "192.168.0.1",
                    },
                    {
                        "FullName": "HMN Bootstrap DHCP Subnet",
                        "CIDR": "192.168.0.0/17",
                        "IPReservations": [
                            {"Name": "ncn-w001", "IPAddress": "192.168.0.4"},
                            {"Name": "ncn-w002", "IPAddress": "192.168.0.5"},
                            {"Name": "ncn-w003", "IPAddress": "192.168.0.6"},
                        ],
                        "Name": "bootstrap_dhcp",
                        "VlanID": 4,
                        "Gateway": "192.168.0.1",
                    },
                ],
            },
        },
        "MTL": {
            "Name": "MTL",
            "ExtraProperties": {
                "CIDR": "192.168.1.0/16",
                "Subnets": [
                    {
                        "Name": "network_hardware",
                        "CIDR": "192.168.1.0/16",
                        "IPReservations": [
                            {"Name": "sw-spine-001", "IPAddress": "192.168.1.2"},
                            {"Name": "sw-spine-002", "IPAddress": "192.168.1.3"},
                            {"Name": "sw-leaf-001", "IPAddress": "192.168.1.4"},
                            {"Name": "sw-leaf-002", "IPAddress": "192.168.1.5"},
                            {"Name": "sw-leaf-003", "IPAddress": "192.168.1.6"},
                            {"Name": "sw-leaf-004", "IPAddress": "192.168.1.7"},
                            {"Name": "sw-leaf-bmc-001", "IPAddress": "192.168.1.12"},
                            {"Name": "sw-leaf-bmc-002", "IPAddress": "192.168.1.13"},
                            {"Name": "sw-leaf-bmc-003", "IPAddress": "192.168.1.14"},
                            {"Name": "sw-leaf-bmc-004", "IPAddress": "192.168.1.15"},
                            {"Name": "sw-cdu-001", "IPAddress": "192.168.1.16"},
                            {"Name": "sw-cdu-002", "IPAddress": "192.168.1.17"},
                        ],
                        "VlanID": 0,
                        "Gateway": "192.168.1.1",
                    },
                ],
            },
        },
        "NMN": {
            "Name": "NMN",
            "FullName": "Node Management Network",
            "ExtraProperties": {
                "CIDR": "192.168.3.0/17",
                "MyASN": 65531,
                "PeerASN": 65533,
                "Subnets": [
                    {
                        "FullName": "NMN Management Network Infrastructure",
                        "CIDR": "192.168.3.0/17",
                        "IPReservations": [
                            {"Name": "sw-spine-001", "IPAddress": "192.168.3.2"},
                            {"Name": "sw-spine-002", "IPAddress": "192.168.3.3"},
                            {"Name": "sw-leaf-001", "IPAddress": "192.168.3.4"},
                            {"Name": "sw-leaf-002", "IPAddress": "192.168.3.5"},
                            {"Name": "sw-leaf-003", "IPAddress": "192.168.3.6"},
                            {"Name": "sw-leaf-004", "IPAddress": "192.168.3.7"},
                            {"Name": "sw-leaf-bmc-001", "IPAddress": "192.168.3.12"},
                            {"Name": "sw-leaf-bmc-002", "IPAddress": "192.168.3.13"},
                            {"Name": "sw-leaf-bmc-003", "IPAddress": "192.168.3.14"},
                            {"Name": "sw-leaf-bmc-004", "IPAddress": "192.168.3.15"},
                            {"Name": "sw-cdu-001", "IPAddress": "192.168.3.16"},
                            {"Name": "sw-cdu-002", "IPAddress": "192.168.3.17"},
                        ],
                        "Name": "network_hardware",
                        "VlanID": 2,
                        "Gateway": "192.168.3.1",
                    },
                    {
                        "FullName": "NMN Bootstrap DHCP Subnet",
                        "CIDR": "192.168.4.0/17",
                        "IPReservations": [
                            {"Name": "ncn-w001", "IPAddress": "192.168.4.4"},
                            {"Name": "ncn-w002", "IPAddress": "192.168.4.5"},
                            {"Name": "ncn-w003", "IPAddress": "192.168.4.6"},
                        ],
                        "Name": "bootstrap_dhcp",
                        "VlanID": 2,
                        "Gateway": "192.168.3.1",
                    },
                ],
            },
        },
        "NMN_MTN": {
            "Name": "NMN_MTN",
            "ExtraProperties": {
                "CIDR": "192.168.100.0/17",
                "Subnets": [
                    {
                        "FullName": "",
                        "CIDR": "192.168.100.0/22",
                        "Name": "cabinet_3002",
                        "VlanID": 2000,
                        "Gateway": "192.168.100.1",
                        "DHCPStart": "192.168.100.10",
                        # NOTE(review): DHCPEnd lies outside the subnet CIDR
                        # 192.168.100.0/22 — confirm this is intentional fixture
                        # data and not a typo for 192.168.103.254.
                        "DHCPEnd": "192.168.3.254",
                    },
                ],
            },
        },
        "HMN_MTN": {
            "Name": "HMN_MTN",
            "ExtraProperties": {
                "CIDR": "192.168.200.0/17",
                "Subnets": [
                    {
                        "FullName": "",
                        "CIDR": "192.168.104.0/22",
                        "Name": "cabinet_3002",
                        "VlanID": 3000,
                        "Gateway": "192.168.104.1",
                        "DHCPStart": "192.168.104.10",
                        "DHCPEnd": "192.168.104.254",
                    },
                ],
            },
        },
        "HMNLB": {
            "Name": "HMNLB",
            "ExtraProperties": {
                "CIDR": "10.94.100.0/24",
                "Subnets": [
                    {
                        # NOTE(review): FullName says "NMN MetalLB" but this is
                        # the HMN pool (10.94.x / hmn_metallb_address_pool) —
                        # looks swapped with NMNLB below; verify against real
                        # SLS output before relying on FullName here.
                        "FullName": "NMN MetalLB",
                        "CIDR": "10.94.100.0/24",
                        "IPReservations": [
                            {"Name": "cray-tftp", "IPAddress": "10.94.100.60"},
                            {"Name": "unbound", "IPAddress": "10.94.100.225"},
                        ],
                        "Name": "hmn_metallb_address_pool",
                        "Gateway": "10.94.100.1",
                    },
                ],
            },
        },
        "NMNLB": {
            "Name": "NMNLB",
            "ExtraProperties": {
                "CIDR": "10.92.100.0/24",
                "Subnets": [
                    {
                        # NOTE(review): FullName "HMN MetalLB" on the NMN pool —
                        # see the matching note on HMNLB above.
                        "FullName": "HMN MetalLB",
                        "CIDR": "10.92.100.0/24",
                        "IPReservations": [
                            {"Name": "cray-tftp", "IPAddress": "10.92.100.60"},
                            {"Name": "unbound", "IPAddress": "10.92.100.225"},
                        ],
                        "Name": "nmn_metallb_address_pool",
                        "Gateway": "10.92.100.1",
                    },
                ],
            },
        },
    },
}
# Flatten the "Networks" mapping into a list of per-network dicts.
# (Replaces a double comprehension over a one-element list, which obscured
# that this is simply the dict's values in insertion order.)
sls_networks = list(sls_input.get("Networks", {}).values())
| 41.742741
| 119
| 0.444625
| 32,261
| 245,823
| 3.36524
| 0.016645
| 0.054934
| 0.032883
| 0.02708
| 0.966979
| 0.961765
| 0.956036
| 0.948437
| 0.936951
| 0.919524
| 0
| 0.165931
| 0.421177
| 245,823
| 5,888
| 120
| 41.74983
| 0.597071
| 0.015047
| 0
| 0.910553
| 0
| 0.064522
| 0.543525
| 0.085
| 0
| 0
| 0
| 0
| 0.03394
| 1
| 0.005126
| false
| 0.01043
| 0.001237
| 0
| 0.006364
| 0.010783
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e07001134fa851c283e9a61d00580ba1001fbc65
| 34,510
|
py
|
Python
|
akshare/stock_feature/stock_hist_em.py
|
NovelResearchInvestment/akshare
|
ccce37101b26e89a46b9b8b7b27b4eebf49edccd
|
[
"MIT"
] | null | null | null |
akshare/stock_feature/stock_hist_em.py
|
NovelResearchInvestment/akshare
|
ccce37101b26e89a46b9b8b7b27b4eebf49edccd
|
[
"MIT"
] | 1
|
2021-12-05T07:58:18.000Z
|
2021-12-05T07:58:18.000Z
|
akshare/stock_feature/stock_hist_em.py
|
NovelResearchInvestment/akshare
|
ccce37101b26e89a46b9b8b7b27b4eebf49edccd
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2022/2/2 23:26
Desc: 东方财富网-行情首页-沪深京 A 股
"""
import requests
import pandas as pd
def stock_zh_a_spot_em() -> pd.DataFrame:
    """Fetch real-time quotes for all SH/SZ/BJ A shares from Eastmoney.

    http://quote.eastmoney.com/center/gridlist.html#hs_a_board

    :return: real-time quotes, one row per listed A share; empty frame when
        the endpoint returns no data
    :rtype: pandas.DataFrame
    """
    url = "http://82.push2.eastmoney.com/api/qt/clist/get"
    params = {
        "pn": "1",
        "pz": "5000",
        "po": "1",
        "np": "1",
        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
        "fltt": "2",
        "invt": "2",
        "fid": "f3",
        # market filters: SZ main/ChiNext, SH main/STAR, and BSE listings
        "fs": "m:0 t:6,m:0 t:80,m:1 t:2,m:1 t:23,m:0 t:81 s:2048",
        "fields": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152",
        "_": "1623833739532",
    }
    r = requests.get(url, params=params)
    data_json = r.json()
    # Robustness: the endpoint can return {"data": null}; the original
    # subscripting would raise TypeError instead of returning empty.
    if not (data_json.get("data") or {}).get("diff"):
        return pd.DataFrame()
    temp_df = pd.DataFrame(data_json["data"]["diff"])
    # Positional mapping of the requested "fields"; unused columns are "_".
    temp_df.columns = [
        "_",
        "最新价",
        "涨跌幅",
        "涨跌额",
        "成交量",
        "成交额",
        "振幅",
        "换手率",
        "市盈率-动态",
        "量比",
        "_",
        "代码",
        "_",
        "名称",
        "最高",
        "最低",
        "今开",
        "昨收",
        "_",
        "_",
        "_",
        "市净率",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
    ]
    temp_df.reset_index(inplace=True)
    temp_df["index"] = temp_df.index + 1
    temp_df.rename(columns={"index": "序号"}, inplace=True)
    temp_df = temp_df[
        [
            "序号",
            "代码",
            "名称",
            "最新价",
            "涨跌幅",
            "涨跌额",
            "成交量",
            "成交额",
            "振幅",
            "最高",
            "最低",
            "今开",
            "昨收",
            "量比",
            "换手率",
            "市盈率-动态",
            "市净率",
        ]
    ]
    # Quote values arrive as strings; "-" placeholders become NaN via coerce.
    numeric_columns = [
        "最新价",
        "涨跌幅",
        "涨跌额",
        "成交量",
        "成交额",
        "振幅",
        "最高",
        "最低",
        "今开",
        "昨收",
        "量比",
        "换手率",
        "市盈率-动态",
        "市净率",
    ]
    for column in numeric_columns:
        temp_df[column] = pd.to_numeric(temp_df[column], errors="coerce")
    return temp_df
def stock_zh_b_spot_em() -> pd.DataFrame:
    """Fetch real-time quotes for all B shares from Eastmoney.

    http://quote.eastmoney.com/center/gridlist.html#hs_a_board

    :return: real-time quotes, one row per listed B share; empty frame when
        the endpoint returns no data
    :rtype: pandas.DataFrame
    """
    url = "http://28.push2.eastmoney.com/api/qt/clist/get"
    params = {
        "pn": "1",
        "pz": "5000",
        "po": "1",
        "np": "1",
        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
        "fltt": "2",
        "invt": "2",
        "fid": "f3",
        # market filters: SZ B shares and SH B shares
        "fs": "m:0 t:7,m:1 t:3",
        "fields": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152",
        "_": "1623833739532",
    }
    r = requests.get(url, params=params)
    data_json = r.json()
    # Robustness: the endpoint can return {"data": null}; the original
    # subscripting would raise TypeError instead of returning empty.
    if not (data_json.get("data") or {}).get("diff"):
        return pd.DataFrame()
    temp_df = pd.DataFrame(data_json["data"]["diff"])
    # Positional mapping of the requested "fields"; unused columns are "_".
    temp_df.columns = [
        "_",
        "最新价",
        "涨跌幅",
        "涨跌额",
        "成交量",
        "成交额",
        "振幅",
        "换手率",
        "市盈率-动态",
        "量比",
        "_",
        "代码",
        "_",
        "名称",
        "最高",
        "最低",
        "今开",
        "昨收",
        "_",
        "_",
        "_",
        "市净率",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
    ]
    temp_df.reset_index(inplace=True)
    temp_df["index"] = range(1, len(temp_df) + 1)
    temp_df.rename(columns={"index": "序号"}, inplace=True)
    temp_df = temp_df[
        [
            "序号",
            "代码",
            "名称",
            "最新价",
            "涨跌幅",
            "涨跌额",
            "成交量",
            "成交额",
            "振幅",
            "最高",
            "最低",
            "今开",
            "昨收",
            "量比",
            "换手率",
            "市盈率-动态",
            "市净率",
        ]
    ]
    # Quote values arrive as strings; "-" placeholders become NaN via coerce.
    numeric_columns = [
        "最新价",
        "涨跌幅",
        "涨跌额",
        "成交量",
        "成交额",
        "振幅",
        "最高",
        "最低",
        "今开",
        "昨收",
        "量比",
        "换手率",
        "市盈率-动态",
        "市净率",
    ]
    for column in numeric_columns:
        temp_df[column] = pd.to_numeric(temp_df[column], errors="coerce")
    return temp_df
def code_id_map_em() -> dict:
    """Build the Eastmoney stock-code -> market-id mapping.

    http://quote.eastmoney.com/center/gridlist.html#hs_a_board

    Market id is 1 for Shanghai listings and 0 for Shenzhen and Beijing
    listings; it prefixes the ``secid`` used by the k-line endpoints.

    :return: mapping of stock code to market id; empty dict when any of the
        three code lists comes back empty (matching the original behavior)
    :rtype: dict
    """
    url = "http://80.push2.eastmoney.com/api/qt/clist/get"

    def _fetch_codes(fs: str) -> list:
        # Query just the code column (field f12) for one market filter;
        # returns [] when the endpoint has no rows for that filter.
        params = {
            "pn": "1",
            "pz": "5000",
            "po": "1",
            "np": "1",
            "ut": "bd1d9ddb04089700cf9c27f6f7426281",
            "fltt": "2",
            "invt": "2",
            "fid": "f3",
            "fs": fs,
            "fields": "f12",
            "_": "1623833739532",
        }
        r = requests.get(url, params=params)
        data_json = r.json()
        diff = data_json["data"]["diff"]
        if not diff:
            return []
        return [item["f12"] for item in diff]

    code_id_dict = {}
    # Shanghai main board + STAR market -> market id 1.
    sh_codes = _fetch_codes("m:1 t:2,m:1 t:23")
    if not sh_codes:
        return dict()
    code_id_dict.update({code: 1 for code in sh_codes})
    # Shenzhen main board + ChiNext -> market id 0.
    sz_codes = _fetch_codes("m:0 t:6,m:0 t:80")
    if not sz_codes:
        return dict()
    code_id_dict.update({code: 0 for code in sz_codes})
    # Beijing Stock Exchange -> market id 0.
    bj_codes = _fetch_codes("m:0 t:81 s:2048")
    if not bj_codes:
        return dict()
    code_id_dict.update({code: 0 for code in bj_codes})
    return code_id_dict
def stock_zh_a_hist(
    symbol: str = "000001",
    period: str = "daily",
    start_date: str = "19700101",
    end_date: str = "20500101",
    adjust: str = "",
) -> pd.DataFrame:
    """
    Eastmoney A-share (Shanghai/Shenzhen/Beijing) daily/weekly/monthly bars.
    http://quote.eastmoney.com/concept/sh603777.html?from=classic
    :param symbol: stock code, e.g. "000001"
    :type symbol: str
    :param period: choice of {'daily', 'weekly', 'monthly'}
    :type period: str
    :param start_date: start date, "YYYYMMDD"
    :type start_date: str
    :param end_date: end date, "YYYYMMDD"
    :type end_date: str
    :param adjust: choice of {"qfq": forward-adjusted, "hfq": backward-adjusted, "": unadjusted}
    :type adjust: str
    :return: historical bars
    :rtype: pandas.DataFrame
    """
    code_id_dict = code_id_map_em()
    adjust_dict = {"qfq": "1", "hfq": "2", "": "0"}
    period_dict = {"daily": "101", "weekly": "102", "monthly": "103"}
    url = "http://push2his.eastmoney.com/api/qt/stock/kline/get"
    params = {
        "fields1": "f1,f2,f3,f4,f5,f6",
        "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f116",
        "ut": "7eea3edcaed734bea9cbfc24409ed989",
        "klt": period_dict[period],
        "fqt": adjust_dict[adjust],
        "secid": f"{code_id_dict[symbol]}.{symbol}",
        "beg": start_date,
        "end": end_date,
        "_": "1623766962675",
    }
    r = requests.get(url, params=params)
    data_json = r.json()
    if not data_json["data"]["klines"]:
        return pd.DataFrame()
    # Each kline is one comma-separated string: date,open,close,high,low,...
    temp_df = pd.DataFrame(
        [item.split(",") for item in data_json["data"]["klines"]]
    )
    temp_df.columns = [
        "日期",
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "振幅",
        "涨跌幅",
        "涨跌额",
        "换手率",
    ]
    # The server already filters by beg/end, so no date slicing is needed
    # here; only convert the quote columns from strings to numbers.
    # (The previous version set a datetime index and immediately dropped it
    # again — dead code, removed.)
    numeric_cols = [
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "振幅",
        "涨跌幅",
        "涨跌额",
        "换手率",
    ]
    for col in numeric_cols:
        temp_df[col] = pd.to_numeric(temp_df[col])
    return temp_df
def stock_zh_a_hist_min_em(
    symbol: str = "000001",
    start_date: str = "1979-09-01 09:32:00",
    end_date: str = "2222-01-01 09:32:00",
    period: str = "5",
    adjust: str = "",
) -> pd.DataFrame:
    """
    Eastmoney A-share intraday (minute-level) bars.
    http://quote.eastmoney.com/concept/sh603777.html?from=classic
    :param symbol: stock code
    :type symbol: str
    :param start_date: start datetime, "YYYY-MM-DD HH:MM:SS"
    :type start_date: str
    :param end_date: end datetime, "YYYY-MM-DD HH:MM:SS"
    :type end_date: str
    :param period: choice of {'1', '5', '15', '30', '60'} (minutes)
    :type period: str
    :param adjust: choice of {'', 'qfq', 'hfq'}
    :type adjust: str
    :return: intraday bars
    :rtype: pandas.DataFrame
    """
    code_id_dict = code_id_map_em()
    adjust_map = {
        "": "0",
        "qfq": "1",
        "hfq": "2",
    }
    if period == "1":
        # NOTE(review): the trends2 endpoint carries no adjustment parameter,
        # so `adjust` is effectively ignored for 1-minute data.
        url = "https://push2his.eastmoney.com/api/qt/stock/trends2/get"
        params = {
            "fields1": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13",
            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58",
            "ut": "7eea3edcaed734bea9cbfc24409ed989",
            "ndays": "5",
            "iscr": "0",
            "secid": f"{code_id_dict[symbol]}.{symbol}",
            "_": "1623766962675",
        }
        r = requests.get(url, params=params)
        data_json = r.json()
        temp_df = pd.DataFrame(
            [item.split(",") for item in data_json["data"]["trends"]]
        )
        temp_df.columns = [
            "时间",
            "开盘",
            "收盘",
            "最高",
            "最低",
            "成交量",
            "成交额",
            "最新价",
        ]
        # Slice by datetime index, then return to a positional index.
        temp_df.index = pd.to_datetime(temp_df["时间"])
        temp_df = temp_df[start_date:end_date]
        temp_df.reset_index(drop=True, inplace=True)
        for col in ("开盘", "收盘", "最高", "最低", "成交量", "成交额", "最新价"):
            temp_df[col] = pd.to_numeric(temp_df[col])
        temp_df["时间"] = pd.to_datetime(temp_df["时间"]).astype(str)
        return temp_df
    else:
        url = "http://push2his.eastmoney.com/api/qt/stock/kline/get"
        params = {
            "fields1": "f1,f2,f3,f4,f5,f6",
            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
            "ut": "7eea3edcaed734bea9cbfc24409ed989",
            "klt": period,
            "fqt": adjust_map[adjust],
            "secid": f"{code_id_dict[symbol]}.{symbol}",
            "beg": "0",
            "end": "20500000",
            "_": "1630930917857",
        }
        r = requests.get(url, params=params)
        data_json = r.json()
        temp_df = pd.DataFrame(
            [item.split(",") for item in data_json["data"]["klines"]]
        )
        temp_df.columns = [
            "时间",
            "开盘",
            "收盘",
            "最高",
            "最低",
            "成交量",
            "成交额",
            "振幅",
            "涨跌幅",
            "涨跌额",
            "换手率",
        ]
        # Slice by datetime index, then return to a positional index.
        temp_df.index = pd.to_datetime(temp_df["时间"])
        temp_df = temp_df[start_date:end_date]
        temp_df.reset_index(drop=True, inplace=True)
        for col in (
            "开盘",
            "收盘",
            "最高",
            "最低",
            "成交量",
            "成交额",
            "振幅",
            "涨跌幅",
            "涨跌额",
            "换手率",
        ):
            temp_df[col] = pd.to_numeric(temp_df[col])
        temp_df["时间"] = pd.to_datetime(temp_df["时间"]).astype(str)
        # Reorder to the documented output column order.
        temp_df = temp_df[
            [
                "时间",
                "开盘",
                "收盘",
                "最高",
                "最低",
                "涨跌幅",
                "涨跌额",
                "成交量",
                "成交额",
                "振幅",
                "换手率",
            ]
        ]
        return temp_df
def stock_zh_a_hist_pre_min_em(
    symbol: str = "000001",
    start_time: str = "09:00:00",
    end_time: str = "15:50:00",
) -> pd.DataFrame:
    """
    Eastmoney A-share 1-minute intraday quotes including the pre-market session.
    http://quote.eastmoney.com/concept/sh603777.html?from=classic
    :param symbol: stock code
    :type symbol: str
    :param start_time: start time within the trading day, "HH:MM:SS"
    :type start_time: str
    :param end_time: end time within the trading day, "HH:MM:SS"
    :type end_time: str
    :return: intraday quotes including pre-market data
    :rtype: pandas.DataFrame
    """
    code_id_dict = code_id_map_em()
    url = "https://push2.eastmoney.com/api/qt/stock/trends2/get"
    params = {
        "fields1": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13",
        "fields2": "f51,f52,f53,f54,f55,f56,f57,f58",
        "ut": "fa5fd1943c7b386f172d6893dbfba10b",
        "ndays": "1",
        "iscr": "1",
        "iscca": "0",
        "secid": f"{code_id_dict[symbol]}.{symbol}",
        "_": "1623766962675",
    }
    resp = requests.get(url, params=params)
    payload = resp.json()
    # Each trend record is one comma-separated string.
    rows = [record.split(",") for record in payload["data"]["trends"]]
    temp_df = pd.DataFrame(rows)
    temp_df.columns = [
        "时间",
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "最新价",
    ]
    temp_df.index = pd.to_datetime(temp_df["时间"])
    # Restrict to [start_time, end_time] on the (single) trading day returned.
    trade_day = temp_df.index[0].date().isoformat()
    temp_df = temp_df[f"{trade_day} {start_time}":f"{trade_day} {end_time}"]
    temp_df.reset_index(drop=True, inplace=True)
    for col in ("开盘", "收盘", "最高", "最低", "成交量", "成交额", "最新价"):
        temp_df[col] = pd.to_numeric(temp_df[col])
    temp_df["时间"] = pd.to_datetime(temp_df["时间"]).astype(str)
    return temp_df
def stock_hk_spot_em() -> pd.DataFrame:
    """
    Eastmoney Hong Kong market real-time quotes.
    http://quote.eastmoney.com/center/gridlist.html#hk_stocks
    :return: real-time Hong Kong quotes
    :rtype: pandas.DataFrame
    """
    url = "http://72.push2.eastmoney.com/api/qt/clist/get"
    params = {
        "pn": "1",
        "pz": "5000",
        "po": "1",
        "np": "1",
        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
        "fltt": "2",
        "invt": "2",
        "fid": "f3",
        "fs": "m:128 t:3,m:128 t:4,m:128 t:1,m:128 t:2",
        "fields": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152",
        "_": "1624010056945",
    }
    resp = requests.get(url, params=params)
    payload = resp.json()
    temp_df = pd.DataFrame(payload["data"]["diff"])
    # Positional rename of the raw f-fields; "_" marks unused columns.
    temp_df.columns = [
        "_",
        "最新价",
        "涨跌幅",
        "涨跌额",
        "成交量",
        "成交额",
        "振幅",
        "换手率",
        "市盈率-动态",
        "量比",
        "_",
        "代码",
        "_",
        "名称",
        "最高",
        "最低",
        "今开",
        "昨收",
        "_",
        "_",
        "_",
        "市净率",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
    ]
    # 1-based row number column, mirroring the web table.
    temp_df.insert(0, "序号", range(1, len(temp_df) + 1))
    temp_df = temp_df[
        [
            "序号",
            "代码",
            "名称",
            "最新价",
            "涨跌额",
            "涨跌幅",
            "今开",
            "最高",
            "最低",
            "昨收",
            "成交量",
            "成交额",
        ]
    ]
    temp_df["序号"] = pd.to_numeric(temp_df["序号"])
    # Quote columns: non-numeric/missing values become NaN.
    for col in ("最新价", "涨跌额", "涨跌幅", "今开", "最高", "最低", "昨收", "成交量", "成交额"):
        temp_df[col] = pd.to_numeric(temp_df[col], errors="coerce")
    return temp_df
def stock_hk_hist(
    symbol: str = "40224",
    period: str = "daily",
    start_date: str = "19700101",
    end_date: str = "22220101",
    adjust: str = "",
) -> pd.DataFrame:
    """
    Eastmoney Hong Kong daily/weekly/monthly bars.
    http://quote.eastmoney.com/hk/08367.html
    :param symbol: HK stock code
    :type symbol: str
    :param period: choice of {'daily', 'weekly', 'monthly'}
    :type period: str
    :param start_date: start date, "YYYYMMDD"
    :type start_date: str
    :param end_date: end date, "YYYYMMDD"
    :type end_date: str
    :param adjust: choice of {"qfq": forward-adjusted, "hfq": backward-adjusted, "": unadjusted}
    :type adjust: str
    :return: historical bars
    :rtype: pandas.DataFrame
    """
    adjust_dict = {"qfq": "1", "hfq": "2", "": "0"}
    period_dict = {"daily": "101", "weekly": "102", "monthly": "103"}
    url = "http://33.push2his.eastmoney.com/api/qt/stock/kline/get"
    params = {
        # 116 is the Eastmoney market prefix for Hong Kong.
        "secid": f"116.{symbol}",
        "ut": "fa5fd1943c7b386f172d6893dbfba10b",
        "fields1": "f1,f2,f3,f4,f5,f6",
        "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
        "klt": period_dict[period],
        "fqt": adjust_dict[adjust],
        "end": "20500000",
        "lmt": "1000000",
        "_": "1623766962675",
    }
    r = requests.get(url, params=params)
    data_json = r.json()
    temp_df = pd.DataFrame(
        [item.split(",") for item in data_json["data"]["klines"]]
    )
    if temp_df.empty:
        return pd.DataFrame()
    temp_df.columns = [
        "日期",
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "振幅",
        "涨跌幅",
        "涨跌额",
        "换手率",
    ]
    # The endpoint has no "beg" parameter here, so filter client-side via
    # a datetime index slice.
    temp_df.index = pd.to_datetime(temp_df["日期"])
    temp_df = temp_df[start_date:end_date]
    if temp_df.empty:
        return pd.DataFrame()
    temp_df.reset_index(inplace=True, drop=True)
    for col in (
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "振幅",
        "涨跌幅",
        "涨跌额",
        "换手率",
    ):
        temp_df[col] = pd.to_numeric(temp_df[col])
    return temp_df
def stock_hk_hist_min_em(
    symbol: str = "01611",
    period: str = "1",
    adjust: str = "",
    start_date: str = "1979-09-01 09:32:00",
    end_date: str = "2222-01-01 09:32:00",
) -> pd.DataFrame:
    """
    Eastmoney Hong Kong intraday (minute-level) bars.
    http://quote.eastmoney.com/hk/00948.html
    :param symbol: HK stock code
    :type symbol: str
    :param period: choice of {'1', '5', '15', '30', '60'} (minutes)
    :type period: str
    :param adjust: choice of {'', 'qfq', 'hfq'}
    :type adjust: str
    :param start_date: start datetime, "YYYY-MM-DD HH:MM:SS"
    :type start_date: str
    :param end_date: end datetime, "YYYY-MM-DD HH:MM:SS"
    :type end_date: str
    :return: intraday bars
    :rtype: pandas.DataFrame
    """
    adjust_map = {
        "": "0",
        "qfq": "1",
        "hfq": "2",
    }
    if period == "1":
        # NOTE(review): the trends2 endpoint carries no adjustment parameter,
        # so `adjust` is effectively ignored for 1-minute data.
        url = "http://push2his.eastmoney.com/api/qt/stock/trends2/get"
        params = {
            "fields1": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13",
            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58",
            "ut": "fa5fd1943c7b386f172d6893dbfba10b",
            "iscr": "0",
            "ndays": "5",
            "secid": f"116.{symbol}",
            "_": "1623766962675",
        }
        r = requests.get(url, params=params)
        data_json = r.json()
        temp_df = pd.DataFrame(
            [item.split(",") for item in data_json["data"]["trends"]]
        )
        temp_df.columns = [
            "时间",
            "开盘",
            "收盘",
            "最高",
            "最低",
            "成交量",
            "成交额",
            "最新价",
        ]
        # Slice by datetime index, then return to a positional index.
        temp_df.index = pd.to_datetime(temp_df["时间"])
        temp_df = temp_df[start_date:end_date]
        temp_df.reset_index(drop=True, inplace=True)
        for col in ("开盘", "收盘", "最高", "最低", "成交量", "成交额", "最新价"):
            temp_df[col] = pd.to_numeric(temp_df[col])
        temp_df["时间"] = pd.to_datetime(temp_df["时间"]).astype(str)
        return temp_df
    else:
        url = "http://push2his.eastmoney.com/api/qt/stock/kline/get"
        params = {
            "fields1": "f1,f2,f3,f4,f5,f6",
            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
            "ut": "bd1d9ddb04089700cf9c27f6f7426281",
            "klt": period,
            "fqt": adjust_map[adjust],
            "secid": f"116.{symbol}",
            "beg": "0",
            "end": "20500000",
            "_": "1630930917857",
        }
        r = requests.get(url, params=params)
        data_json = r.json()
        temp_df = pd.DataFrame(
            [item.split(",") for item in data_json["data"]["klines"]]
        )
        temp_df.columns = [
            "时间",
            "开盘",
            "收盘",
            "最高",
            "最低",
            "成交量",
            "成交额",
            "振幅",
            "涨跌幅",
            "涨跌额",
            "换手率",
        ]
        # Slice by datetime index, then return to a positional index.
        temp_df.index = pd.to_datetime(temp_df["时间"])
        temp_df = temp_df[start_date:end_date]
        temp_df.reset_index(drop=True, inplace=True)
        for col in (
            "开盘",
            "收盘",
            "最高",
            "最低",
            "成交量",
            "成交额",
            "振幅",
            "涨跌幅",
            "涨跌额",
            "换手率",
        ):
            temp_df[col] = pd.to_numeric(temp_df[col])
        temp_df["时间"] = pd.to_datetime(temp_df["时间"]).astype(str)
        # Reorder to the documented output column order.
        temp_df = temp_df[
            [
                "时间",
                "开盘",
                "收盘",
                "最高",
                "最低",
                "涨跌幅",
                "涨跌额",
                "成交量",
                "成交额",
                "振幅",
                "换手率",
            ]
        ]
        return temp_df
def stock_us_spot_em() -> pd.DataFrame:
    """
    Eastmoney US market real-time quotes (delayed ~15 min).
    http://quote.eastmoney.com/center/gridlist.html#us_stocks
    :return: real-time US quotes
    :rtype: pandas.DataFrame
    """
    url = "http://72.push2.eastmoney.com/api/qt/clist/get"
    params = {
        "pn": "1",
        "pz": "20000",
        "po": "1",
        "np": "1",
        "ut": "bd1d9ddb04089700cf9c27f6f7426281",
        "fltt": "2",
        "invt": "2",
        "fid": "f3",
        "fs": "m:105,m:106,m:107",
        "fields": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f26,f22,f33,f11,f62,f128,f136,f115,f152",
        "_": "1624010056945",
    }
    r = requests.get(url, params=params)
    data_json = r.json()
    temp_df = pd.DataFrame(data_json["data"]["diff"])
    # Positional rename of the raw f-fields; "_" marks unused columns.
    temp_df.columns = [
        "_",
        "最新价",
        "涨跌幅",
        "涨跌额",
        "成交量",
        "成交额",
        "振幅",
        "换手率",
        "_",
        "_",
        "_",
        "简称",
        "编码",
        "名称",
        "最高价",
        "最低价",
        "开盘价",
        "昨收价",
        "总市值",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "市盈率",
        "_",
        "_",
        "_",
        "_",
        "_",
    ]
    temp_df.reset_index(inplace=True)
    temp_df["index"] = range(1, len(temp_df) + 1)
    temp_df.rename(columns={"index": "序号"}, inplace=True)
    # Full ticker is "<market code>.<symbol>", e.g. "105.MSFT".
    temp_df["代码"] = temp_df["编码"].astype(str) + "." + temp_df["简称"]
    temp_df = temp_df[
        [
            "序号",
            "名称",
            "最新价",
            "涨跌额",
            "涨跌幅",
            "开盘价",
            "最高价",
            "最低价",
            "昨收价",
            "总市值",
            "市盈率",
            "成交量",
            "成交额",
            "振幅",
            "换手率",
            "代码",
        ]
    ]
    # Convert quote columns to numeric; bad/missing values become NaN.
    numeric_cols = [
        "最新价",
        "涨跌额",
        "涨跌幅",
        "开盘价",
        "最高价",
        "最低价",
        "昨收价",
        "总市值",
        "市盈率",
        "成交量",
        "成交额",
        "振幅",
        "换手率",
    ]
    for col in numeric_cols:
        temp_df[col] = pd.to_numeric(temp_df[col], errors="coerce")
    return temp_df
def stock_us_hist(
    symbol: str = "105.MSFT",
    period: str = "daily",
    start_date: str = "19700101",
    end_date: str = "22220101",
    adjust: str = "",
) -> pd.DataFrame:
    """
    Eastmoney US daily/weekly/monthly bars.
    http://quote.eastmoney.com/us/ENTX.html#fullScreenChart
    :param symbol: full ticker "<market>.<symbol>"; obtain it from the
        `代码` field of ak.stock_us_spot_em()
    :type symbol: str
    :param period: choice of {'daily', 'weekly', 'monthly'}
    :type period: str
    :param start_date: start date, "YYYYMMDD"
    :type start_date: str
    :param end_date: end date, "YYYYMMDD"
    :type end_date: str
    :param adjust: choice of {"qfq": forward-adjusted, "hfq": backward-adjusted, "": unadjusted}
    :type adjust: str
    :return: historical bars
    :rtype: pandas.DataFrame
    """
    period_dict = {"daily": "101", "weekly": "102", "monthly": "103"}
    adjust_dict = {"qfq": "1", "hfq": "2", "": "0"}
    url = "http://63.push2his.eastmoney.com/api/qt/stock/kline/get"
    params = {
        "secid": f"{symbol}",
        "ut": "fa5fd1943c7b386f172d6893dbfba10b",
        "fields1": "f1,f2,f3,f4,f5,f6",
        "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
        "klt": period_dict[period],
        "fqt": adjust_dict[adjust],
        "end": "20500000",
        "lmt": "1000000",
        "_": "1623766962675",
    }
    r = requests.get(url, params=params)
    data_json = r.json()
    if not data_json["data"]["klines"]:
        return pd.DataFrame()
    temp_df = pd.DataFrame(
        [item.split(",") for item in data_json["data"]["klines"]]
    )
    temp_df.columns = [
        "日期",
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "振幅",
        "涨跌幅",
        "涨跌额",
        "换手率",
    ]
    # The endpoint has no "beg" parameter here, so filter client-side via
    # a datetime index slice.
    temp_df.index = pd.to_datetime(temp_df["日期"])
    temp_df = temp_df[start_date:end_date]
    temp_df.reset_index(inplace=True, drop=True)
    for col in (
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "振幅",
        "涨跌幅",
        "涨跌额",
        "换手率",
    ):
        temp_df[col] = pd.to_numeric(temp_df[col])
    # ignore_index keeps the positional index 0..n-1 after sorting (the
    # previous version left a shuffled index when rows were reordered).
    temp_df.sort_values(["日期"], inplace=True, ignore_index=True)
    return temp_df
def stock_us_hist_min_em(
    symbol: str = "105.ATER",
    start_date: str = "1979-09-01 09:32:00",
    end_date: str = "2222-01-01 09:32:00",
) -> pd.DataFrame:
    """
    Eastmoney US 1-minute intraday quotes.
    http://quote.eastmoney.com/us/ATER.html
    :param symbol: full ticker "<market>.<symbol>"
    :type symbol: str
    :param start_date: start datetime, "YYYY-MM-DD HH:MM:SS"
    :type start_date: str
    :param end_date: end datetime, "YYYY-MM-DD HH:MM:SS"
    :type end_date: str
    :return: intraday quotes
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/stock/trends2/get"
    # symbol is "<market>.<symbol>"; rebuild the secid from its two parts.
    parts = symbol.split(".")
    params = {
        "fields1": "f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f11,f12,f13",
        "fields2": "f51,f52,f53,f54,f55,f56,f57,f58",
        "ut": "fa5fd1943c7b386f172d6893dbfba10b",
        "iscr": "0",
        "ndays": "5",
        "secid": f"{parts[0]}.{parts[1]}",
        "_": "1623766962675",
    }
    resp = requests.get(url, params=params)
    payload = resp.json()
    if not payload["data"]["trends"]:
        return pd.DataFrame()
    rows = [record.split(",") for record in payload["data"]["trends"]]
    temp_df = pd.DataFrame(rows)
    temp_df.columns = [
        "时间",
        "开盘",
        "收盘",
        "最高",
        "最低",
        "成交量",
        "成交额",
        "最新价",
    ]
    # Slice by datetime index, then return to a positional index.
    temp_df.index = pd.to_datetime(temp_df["时间"])
    temp_df = temp_df[start_date:end_date]
    temp_df.reset_index(drop=True, inplace=True)
    for col in ("开盘", "收盘", "最高", "最低", "成交量", "成交额", "最新价"):
        temp_df[col] = pd.to_numeric(temp_df[col])
    temp_df["时间"] = pd.to_datetime(temp_df["时间"]).astype(str)
    return temp_df
if __name__ == "__main__":
    # Demo / smoke-test driver: exercises each fetcher and prints the result.
    # NOTE(review): every call below hits eastmoney.com over the network.
    stock_hk_spot_em_df = stock_hk_spot_em()
    print(stock_hk_spot_em_df)
    # Hong Kong daily history: unadjusted, forward-adjusted and backward-adjusted.
    stock_hk_hist_df = stock_hk_hist(
        symbol="01246",
        period="daily",
        start_date="19700101",
        end_date="22220101",
        adjust="",
    )
    print(stock_hk_hist_df)
    stock_hk_hist_qfq_df = stock_hk_hist(
        symbol="00593",
        period="weekly",
        start_date="19700101",
        end_date="22220101",
        adjust="qfq",
    )
    print(stock_hk_hist_qfq_df)
    stock_hk_hist_hfq_df = stock_hk_hist(
        symbol="00326",
        start_date="19700101",
        end_date="22220101",
        adjust="hfq",
    )
    print(stock_hk_hist_hfq_df)
    # US market snapshot and weekly forward-adjusted history.
    stock_us_spot_em_df = stock_us_spot_em()
    print(stock_us_spot_em_df)
    stock_us_hist_df = stock_us_hist(
        symbol="105.TKNO",
        period="weekly",
        start_date="19700101",
        end_date="22220101",
        adjust="qfq",
    )
    print(stock_us_hist_df)
    # A-share snapshot and daily backward-adjusted history
    # (stock_zh_a_spot_em is defined earlier in this file).
    stock_zh_a_spot_em_df = stock_zh_a_spot_em()
    print(stock_zh_a_spot_em_df)
    stock_zh_a_hist_df = stock_zh_a_hist(
        symbol="000042",
        period="daily",
        start_date="20220401",
        end_date="20220502",
        adjust="hfq",
    )
    print(stock_zh_a_hist_df)
    # Minute-level data, including the pre-market variant, on a BSE symbol.
    stock_zh_a_hist_min_em_df = stock_zh_a_hist_min_em(symbol="833454")
    print(stock_zh_a_hist_min_em_df)
    stock_zh_a_hist_pre_min_em_df = stock_zh_a_hist_pre_min_em(symbol="833454")
    print(stock_zh_a_hist_pre_min_em_df)
    stock_hk_hist_min_em_df = stock_hk_hist_min_em(symbol="01611")
    print(stock_hk_hist_min_em_df)
    stock_us_spot_em_df = stock_us_spot_em()
    print(stock_us_spot_em_df)
    stock_us_hist_min_em_df = stock_us_hist_min_em(symbol="106.TTE")
    print(stock_us_hist_min_em_df)
    # 60-minute backward-adjusted bars over a narrow time window.
    stock_zh_a_hist_min_em_df = stock_zh_a_hist_min_em(
        symbol="000001",
        period="60",
        adjust="hfq",
        start_date="2022-04-15 09:32:00",
        end_date="2022-04-15 14:40:00",
    )
    print(stock_zh_a_hist_min_em_df)
    stock_zh_a_hist_df = stock_zh_a_hist(
        symbol="833454",
        period="daily",
        start_date="20170301",
        end_date="20211115",
        adjust="hfq",
    )
    print(stock_zh_a_hist_df)
    stock_hk_hist_min_em_df = stock_hk_hist_min_em(
        symbol="01611",
        period="1",
        adjust="",
        start_date="2021-11-01 09:32:00",
        end_date="2021-11-01 18:32:00",
    )
    print(stock_hk_hist_min_em_df)
    # B-share snapshot (stock_zh_b_spot_em is defined earlier in this file).
    stock_zh_b_spot_em_df = stock_zh_b_spot_em()
    print(stock_zh_b_spot_em_df)
| 29.470538
| 139
| 0.517589
| 4,565
| 34,510
| 3.672727
| 0.069441
| 0.143147
| 0.084636
| 0.115412
| 0.909519
| 0.881964
| 0.872182
| 0.850352
| 0.829715
| 0.808004
| 0
| 0.072662
| 0.293335
| 34,510
| 1,170
| 140
| 29.495727
| 0.614836
| 0.09064
| 0
| 0.820487
| 0
| 0.015213
| 0.200968
| 0.053277
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01217
| false
| 0
| 0.002028
| 0
| 0.03854
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
161c5be78860ec84d30716b5c3855e7d51e1ea22
| 52,005
|
py
|
Python
|
views/AntdRow.py
|
RuixiangS/feffery-antd-docs
|
c48d34ed657ec8d6893440c0ee6382598c564922
|
[
"MIT"
] | 10
|
2021-05-20T06:52:42.000Z
|
2022-03-29T08:36:58.000Z
|
views/AntdRow.py
|
RuixiangS/feffery-antd-docs
|
c48d34ed657ec8d6893440c0ee6382598c564922
|
[
"MIT"
] | null | null | null |
views/AntdRow.py
|
RuixiangS/feffery-antd-docs
|
c48d34ed657ec8d6893440c0ee6382598c564922
|
[
"MIT"
] | 2
|
2021-09-14T07:07:00.000Z
|
2021-12-10T01:03:25.000Z
|
from dash import dcc
from dash import html
import feffery_antd_components as fac
import feffery_utils_components as fuc
docs_content = html.Div(
[
html.Div(
[
html.H2(
'AntdRow(id, className, style, *args, **kwargs)',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5'
}
),
fac.AntdBackTop(
containerId='docs-content',
duration=0.6
),
html.Span(
'主要参数说明:',
id='主要参数说明',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5',
'fontWeight': 'bold',
'fontSize': '1.2rem'
}
),
fuc.FefferyMarkdown(
markdownStr=open('documents/AntdRow.md', encoding='utf-8').read()
),
html.Div(
html.Span(
'使用示例',
id='使用示例',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5',
'fontWeight': 'bold',
'fontSize': '1.2rem'
}
),
style={
'marginBottom': '10px'
}
),
html.Div(
[
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(0, 146, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
fac.AntdCol(
html.Div(
'col4',
style={
'backgroundColor': 'rgba(0, 146, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
],
gutter=10
),
fac.AntdDivider(
'基础使用',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(0, 146, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
fac.AntdCol(
html.Div(
'col4',
style={
'backgroundColor': 'rgba(0, 146, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=6
),
],
gutter=10
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='基础使用',
className='div-highlight'
),
html.Div(
[
fac.AntdRow(
[
fac.AntdCol(
html.Div(
f'col{col + 1}',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=2
)
if col % 2 == 0
else fac.AntdCol(
html.Div(
f'col{col + 1}',
style={
'backgroundColor': 'rgba(0, 146, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=2
)
for col in range(14)
],
gutter=[5, 5]
),
fac.AntdDivider(
'行组件内部列组件的span之和大于24时自动换行',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdRow(
[
fac.AntdCol(
html.Div(
f'col{col + 1}',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=2
)
if col % 2 == 0
else fac.AntdCol(
html.Div(
f'col{col + 1}',
style={
'backgroundColor': 'rgba(0, 146, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=2
)
for col in range(14)
],
gutter=[5, 5]
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='行组件内部列组件的span之和大于24时自动换行',
className='div-highlight'
),
html.Div(
[
fac.AntdDivider('justify="start"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10
),
fac.AntdDivider('justify="end"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='end'
),
fac.AntdDivider('justify="center"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='center'
),
fac.AntdDivider('justify="space-around"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='space-around'
),
fac.AntdDivider('justify="space-between"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='space-between'
),
fac.AntdDivider(
'不同的水平对齐方式',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdDivider('justify="start"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10
),
fac.AntdDivider('justify="end"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='end'
),
fac.AntdDivider('justify="center"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='center'
),
fac.AntdDivider('justify="space-around"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='space-around'
),
fac.AntdDivider('justify="space-between"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
justify='space-between'
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='不同的水平对齐方式',
className='div-highlight'
),
html.Div(
[
fac.AntdDivider('align="top"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10
),
fac.AntdDivider('align="middle"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
align='middle'
),
fac.AntdDivider('align="bottom"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
align='bottom'
),
fac.AntdDivider(
'不同的垂直对齐方式',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdDivider('align="top"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10
),
fac.AntdDivider('align="middle"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
align='middle'
),
fac.AntdDivider('align="bottom"', innerTextOrientation='left'),
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'col1',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col2',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '25px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
),
fac.AntdCol(
html.Div(
'col3',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '100px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
span=4
)
],
gutter=10,
align='bottom'
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='不同的垂直对齐方式',
className='div-highlight'
),
html.Div(
[
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'1/7',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
flex='1'
),
fac.AntdCol(
html.Div(
'2/7',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
flex='2'
),
fac.AntdCol(
html.Div(
'4/7',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
flex='4'
)
],
gutter=10
),
fac.AntdDivider(
'使用flex更灵活地配置列宽度',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdRow(
[
fac.AntdCol(
html.Div(
'1/7',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
flex='1'
),
fac.AntdCol(
html.Div(
'2/7',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
flex='2'
),
fac.AntdCol(
html.Div(
'4/7',
style={
'backgroundColor': 'rgba(64, 173, 255, 1)',
'color': 'white',
'height': '50px',
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
flex='4'
)
],
gutter=10
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='使用flex更灵活地配置列宽度',
className='div-highlight'
),
html.Div(style={'height': '100px'})
],
style={
'flex': 'auto'
}
),
html.Div(
fac.AntdAnchor(
linkDict=[
{'title': '主要参数说明', 'href': '#主要参数说明'},
{
'title': '使用示例',
'href': '#使用示例',
'children': [
{'title': '基础使用', 'href': '#基础使用'},
{'title': '行组件内部列组件的span之和大于24时自动换行', 'href': '#行组件内部列组件的span之和大于24时自动换行'},
{'title': '不同的水平对齐方式', 'href': '#不同的水平对齐方式'},
{'title': '不同的垂直对齐方式', 'href': '#不同的垂直对齐方式'},
{'title': '使用flex更灵活地配置列宽度', 'href': '#使用flex更灵活地配置列宽度'},
]
},
],
containerId='docs-content',
targetOffset=200
),
style={
'flex': 'none',
'margin': '20px'
}
)
],
style={
'display': 'flex'
}
)
| 39.249057
| 103
| 0.238592
| 2,249
| 52,005
| 5.512672
| 0.06225
| 0.04291
| 0.074528
| 0.090498
| 0.929908
| 0.92652
| 0.92652
| 0.922004
| 0.921439
| 0.921439
| 0
| 0.061672
| 0.667628
| 52,005
| 1,324
| 104
| 39.278701
| 0.655597
| 0
| 0
| 0.858249
| 0
| 0
| 0.39127
| 0.013441
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003098
| 0
| 0.003098
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
164c80c87d74df218d38ea814ccb7c6a5bd79e81
| 6,949
|
py
|
Python
|
python3/lib/python3.6/site-packages/tensorflow/python/keras/api/_v2/keras/layers/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 3
|
2020-10-12T15:47:01.000Z
|
2022-01-14T19:51:26.000Z
|
python3/lib/python3.6/site-packages/tensorflow/python/keras/api/_v2/keras/layers/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | null | null | null |
python3/lib/python3.6/site-packages/tensorflow/python/keras/api/_v2/keras/layers/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 2
|
2020-08-03T13:02:06.000Z
|
2020-11-04T03:15:44.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Keras layers API.
"""
from __future__ import print_function as _print_function
from tensorflow.python.feature_column.feature_column_lib import DenseFeatures
from tensorflow.python.feature_column.feature_column_lib import Layer
from tensorflow.python.keras import Input
from tensorflow.python.keras.engine import InputLayer
from tensorflow.python.keras.engine import InputSpec
from tensorflow.python.keras.layers import AbstractRNNCell
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import ActivityRegularization
from tensorflow.python.keras.layers import Add
from tensorflow.python.keras.layers import AdditiveAttention
from tensorflow.python.keras.layers import AlphaDropout
from tensorflow.python.keras.layers import Attention
from tensorflow.python.keras.layers import Average
from tensorflow.python.keras.layers import AveragePooling1D
from tensorflow.python.keras.layers import AveragePooling1D as AvgPool1D
from tensorflow.python.keras.layers import AveragePooling2D
from tensorflow.python.keras.layers import AveragePooling2D as AvgPool2D
from tensorflow.python.keras.layers import AveragePooling3D
from tensorflow.python.keras.layers import AveragePooling3D as AvgPool3D
from tensorflow.python.keras.layers import BatchNormalizationV2 as BatchNormalization
from tensorflow.python.keras.layers import Bidirectional
from tensorflow.python.keras.layers import Concatenate
from tensorflow.python.keras.layers import Conv1D
from tensorflow.python.keras.layers import Conv1D as Convolution1D
from tensorflow.python.keras.layers import Conv2D
from tensorflow.python.keras.layers import Conv2D as Convolution2D
from tensorflow.python.keras.layers import Conv2DTranspose
from tensorflow.python.keras.layers import Conv2DTranspose as Convolution2DTranspose
from tensorflow.python.keras.layers import Conv3D
from tensorflow.python.keras.layers import Conv3D as Convolution3D
from tensorflow.python.keras.layers import Conv3DTranspose
from tensorflow.python.keras.layers import Conv3DTranspose as Convolution3DTranspose
from tensorflow.python.keras.layers import ConvLSTM2D
from tensorflow.python.keras.layers import Cropping1D
from tensorflow.python.keras.layers import Cropping2D
from tensorflow.python.keras.layers import Cropping3D
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.layers import DepthwiseConv2D
from tensorflow.python.keras.layers import Dot
from tensorflow.python.keras.layers import Dropout
from tensorflow.python.keras.layers import ELU
from tensorflow.python.keras.layers import Embedding
from tensorflow.python.keras.layers import Flatten
from tensorflow.python.keras.layers import GRUCell_v2 as GRUCell
from tensorflow.python.keras.layers import GRU_v2 as GRU
from tensorflow.python.keras.layers import GaussianDropout
from tensorflow.python.keras.layers import GaussianNoise
from tensorflow.python.keras.layers import GlobalAveragePooling1D
from tensorflow.python.keras.layers import GlobalAveragePooling1D as GlobalAvgPool1D
from tensorflow.python.keras.layers import GlobalAveragePooling2D
from tensorflow.python.keras.layers import GlobalAveragePooling2D as GlobalAvgPool2D
from tensorflow.python.keras.layers import GlobalAveragePooling3D
from tensorflow.python.keras.layers import GlobalAveragePooling3D as GlobalAvgPool3D
from tensorflow.python.keras.layers import GlobalMaxPool1D
from tensorflow.python.keras.layers import GlobalMaxPool1D as GlobalMaxPooling1D
from tensorflow.python.keras.layers import GlobalMaxPool2D
from tensorflow.python.keras.layers import GlobalMaxPool2D as GlobalMaxPooling2D
from tensorflow.python.keras.layers import GlobalMaxPool3D
from tensorflow.python.keras.layers import GlobalMaxPool3D as GlobalMaxPooling3D
from tensorflow.python.keras.layers import LSTMCell_v2 as LSTMCell
from tensorflow.python.keras.layers import LSTM_v2 as LSTM
from tensorflow.python.keras.layers import Lambda
from tensorflow.python.keras.layers import LayerNormalization
from tensorflow.python.keras.layers import LeakyReLU
from tensorflow.python.keras.layers import LocallyConnected1D
from tensorflow.python.keras.layers import LocallyConnected2D
from tensorflow.python.keras.layers import Masking
from tensorflow.python.keras.layers import MaxPool1D
from tensorflow.python.keras.layers import MaxPool1D as MaxPooling1D
from tensorflow.python.keras.layers import MaxPool2D
from tensorflow.python.keras.layers import MaxPool2D as MaxPooling2D
from tensorflow.python.keras.layers import MaxPool3D
from tensorflow.python.keras.layers import MaxPool3D as MaxPooling3D
from tensorflow.python.keras.layers import Maximum
from tensorflow.python.keras.layers import Minimum
from tensorflow.python.keras.layers import Multiply
from tensorflow.python.keras.layers import PReLU
from tensorflow.python.keras.layers import Permute
from tensorflow.python.keras.layers import RNN
from tensorflow.python.keras.layers import ReLU
from tensorflow.python.keras.layers import RepeatVector
from tensorflow.python.keras.layers import Reshape
from tensorflow.python.keras.layers import SeparableConv1D
from tensorflow.python.keras.layers import SeparableConv1D as SeparableConvolution1D
from tensorflow.python.keras.layers import SeparableConv2D
from tensorflow.python.keras.layers import SeparableConv2D as SeparableConvolution2D
from tensorflow.python.keras.layers import SimpleRNN
from tensorflow.python.keras.layers import SimpleRNNCell
from tensorflow.python.keras.layers import Softmax
from tensorflow.python.keras.layers import SpatialDropout1D
from tensorflow.python.keras.layers import SpatialDropout2D
from tensorflow.python.keras.layers import SpatialDropout3D
from tensorflow.python.keras.layers import StackedRNNCells
from tensorflow.python.keras.layers import Subtract
from tensorflow.python.keras.layers import ThresholdedReLU
from tensorflow.python.keras.layers import TimeDistributed
from tensorflow.python.keras.layers import UpSampling1D
from tensorflow.python.keras.layers import UpSampling2D
from tensorflow.python.keras.layers import UpSampling3D
from tensorflow.python.keras.layers import Wrapper
from tensorflow.python.keras.layers import ZeroPadding1D
from tensorflow.python.keras.layers import ZeroPadding2D
from tensorflow.python.keras.layers import ZeroPadding3D
from tensorflow.python.keras.layers import add
from tensorflow.python.keras.layers import average
from tensorflow.python.keras.layers import concatenate
from tensorflow.python.keras.layers import deserialize
from tensorflow.python.keras.layers import dot
from tensorflow.python.keras.layers import maximum
from tensorflow.python.keras.layers import minimum
from tensorflow.python.keras.layers import multiply
from tensorflow.python.keras.layers import serialize
from tensorflow.python.keras.layers import subtract
del _print_function
| 56.495935
| 85
| 0.8725
| 882
| 6,949
| 6.85034
| 0.155329
| 0.301887
| 0.374048
| 0.459285
| 0.797087
| 0.797087
| 0.484773
| 0.162032
| 0.162032
| 0.143827
| 0
| 0.01215
| 0.076126
| 6,949
| 122
| 86
| 56.959016
| 0.928972
| 0.020722
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.991304
| 0
| 0.991304
| 0.017391
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1678e10a794f52bdcee3642668c0fb62a3319c18
| 34,739
|
py
|
Python
|
tests/integration_test/action_service_test.py
|
udit-pandey/kairon
|
98d9cab64f184e079052e7583853c193e7eba408
|
[
"Apache-2.0"
] | 97
|
2020-08-18T10:07:48.000Z
|
2022-03-26T18:33:37.000Z
|
tests/integration_test/action_service_test.py
|
digiteinfotech/kairon
|
6a2f0a056dbfe5c041fd9e00a6f5b878e339309e
|
[
"Apache-2.0"
] | 276
|
2020-08-27T23:24:35.000Z
|
2022-03-31T09:43:30.000Z
|
tests/integration_test/action_service_test.py
|
digiteinfotech/kairon
|
6a2f0a056dbfe5c041fd9e00a6f5b878e339309e
|
[
"Apache-2.0"
] | 46
|
2020-09-11T13:29:41.000Z
|
2022-03-08T12:27:17.000Z
|
from tornado.test.testing_test import AsyncHTTPTestCase
from kairon.actions.server import make_app
from kairon.shared.actions.data_objects import HttpActionConfig, SlotSetAction, Actions, FormValidations
from kairon.shared.actions.exception import ActionFailure
from kairon.shared.actions.models import ActionType
from kairon.shared.data.data_objects import Slots
from kairon.shared.utils import Utility
from kairon.shared.actions.utils import ActionUtility
from mongoengine import connect
import json
import responses
from mock import patch
import os
os.environ["system_file"] = "./tests/testing_data/system.yaml"
os.environ['ASYNC_TEST_TIMEOUT'] = "360"
Utility.load_environment()
connect(**Utility.mongoengine_connection())
class TestActionServer(AsyncHTTPTestCase):
def get_app(self):
return make_app()
def test_index(self):
response = self.fetch("/")
self.assertEqual(response.code, 200)
self.assertEqual(response.body.decode("utf8"), 'Kairon Server Running')
def test_http_action_execution(self):
action_name = "test_run_with_get"
action = HttpActionConfig(
auth_token="",
action_name=action_name,
response="The value of ${a.b.3} in ${a.b.d.0} is ${a.b.d}",
http_url="http://localhost:8081/mock",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict(), ActionType.http_action.value
http_url = 'http://localhost:8081/mock'
resp_msg = json.dumps({
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
})
responses.start()
responses.add(
method=responses.GET,
url=http_url,
body=resp_msg,
status=200,
)
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
with patch.object(ActionUtility, "get_action_config") as mocked:
mocked.side_effect = _get_action
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(len(response_json['events']), 1)
self.assertEqual(len(response_json['responses']), 1)
self.assertEqual(response_json['events'], [
{'event': 'slot', 'timestamp': None, 'name': 'KAIRON_ACTION_RESPONSE',
'value': "The value of 2 in red is ['red', 'buggy', 'bumpers']"}])
self.assertEqual(response_json['responses'][0]['text'],
"The value of 2 in red is ['red', 'buggy', 'bumpers']")
def test_http_action_failed_execution(self):
action_name = "test_run_with_get"
action = HttpActionConfig(
auth_token="",
action_name=action_name,
response="The value of ${a.b.3} in ${a.b.d.0} is ${a.b.d}",
http_url="http://localhost:8082/mock",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict(), ActionType.http_action.value
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {'bot': "5f50fd0a56b698ca10d35d2e"},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
with patch.object(ActionUtility, "get_action_config") as mocked:
mocked.side_effect = _get_action
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(len(response_json['events']), 1)
self.assertEqual(len(response_json['responses']), 1)
self.assertEqual(response_json['events'], [
{'event': 'slot', 'timestamp': None, 'name': 'KAIRON_ACTION_RESPONSE',
'value': "I have failed to process your request"}])
self.assertEqual(response_json['responses'][0]['text'], "I have failed to process your request")
def test_http_action_missing_action_name(self):
action_name = ""
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(response_json, None)
def test_http_action_doesnotexist(self):
action_name = "does_not_exist_action"
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(response_json, {'events': [], 'responses': []})
def test_slot_set_action_from_value(self):
action_name = "test_slot_set_from_value"
action = SlotSetAction(
name=action_name,
slot="location",
type="from_value",
value="Mumbai",
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict(), ActionType.slot_set_action.value
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
with patch.object(ActionUtility, "get_action_config") as mocked:
mocked.side_effect = _get_action
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(len(response_json['events']), 1)
self.assertEqual(len(response_json['responses']), 0)
self.assertEqual(response_json['events'],
[{'event': 'slot', 'timestamp': None, 'name': 'location', 'value': 'Mumbai'}])
self.assertEqual(response_json['responses'], [])
def test_slot_set_action_reset_slot(self):
action_name = "test_slot_set_action_reset_slot"
action = SlotSetAction(
name=action_name,
slot="location",
type="reset_slot",
value="current_location",
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict(), ActionType.slot_set_action.value
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": 'Bengaluru', 'current_location': 'Bengaluru'},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None, 'current_location': None},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
with patch.object(ActionUtility, "get_action_config") as mocked:
mocked.side_effect = _get_action
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(len(response_json['events']), 1)
self.assertEqual(len(response_json['responses']), 0)
self.assertEqual(response_json['events'],
[{'event': 'slot', 'timestamp': None, 'name': 'location', 'value': None}])
self.assertEqual(response_json['responses'], [])
def test_slot_set_action_from_slot_not_present(self):
action_name = "test_slot_set_action_from_slot_not_present"
action = SlotSetAction(
name=action_name,
slot="location",
type="from_slot",
value="current_location",
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict(), ActionType.slot_set_action.value
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
with patch.object(ActionUtility, "get_action_config") as mocked:
mocked.side_effect = _get_action
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(len(response_json['events']), 1)
self.assertEqual(len(response_json['responses']), 0)
self.assertEqual(response_json['events'],
[{'event': 'slot', 'timestamp': None, 'name': 'location', 'value': None}])
self.assertEqual(response_json['responses'], [])
def test_invalid_action(self):
action_name = "custom_user_action"
def _get_action(*arge, **kwargs):
raise ActionFailure('Only http & slot set actions are compatible with action server')
request_object = {
"next_action": action_name,
"tracker": {
"sender_id": "default",
"conversation_id": "default",
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
"latest_event_time": 1537645578.314389,
"followup_action": "action_listen",
"paused": False,
"events": [{"event1": "hello"}, {"event2": "how are you"}],
"latest_input_channel": "rest",
"active_loop": {},
"latest_action": {},
},
"domain": {
"config": {},
"session_config": {},
"intents": [],
"entities": [],
"slots": {"bot": "5f50fd0a56b698ca10d35d2e"},
"responses": {},
"actions": [],
"forms": {},
"e2e_actions": []
},
"version": "version"
}
response = self.fetch("/webhook", method="POST", body=json.dumps(request_object).encode('utf-8'))
response_json = json.loads(response.body.decode("utf8"))
self.assertEqual(response.code, 200)
self.assertEqual(response_json, {'events': [], 'responses': []})
def test_form_validation_action_valid_slot_value(self):
    """A slot value satisfying the validation semantics is accepted and set."""
    bot = '5f50fd0a56b698ca10d35d2e'
    user = 'test_user'
    slot = 'location'
    action_name = "validate_location"
    semantic_expression = {
        'and': [
            {'and': [{'operator': 'in', 'value': ['Mumbai', 'Bangalore']},
                     {'operator': 'startswith', 'value': 'M'},
                     {'operator': 'endswith', 'value': 'i'},
                     ]},
            {'or': [{'operator': 'has_length_greater_than', 'value': 20},
                    {'operator': 'has_no_whitespace'},
                    {'operator': 'matches_regex', 'value': '^[e]+.*[e]$'}]},
        ]
    }
    # Persist the action, its validation rule and the slot definition.
    Actions(name=action_name, type=ActionType.form_validation_action.value, bot=bot, user=user).save()
    FormValidations(name=action_name, slot=slot, validation_semantic=semantic_expression,
                    bot=bot, user=user).save()
    Slots(name=slot, type='text', bot=bot, user=user).save()
    tracker = {
        "sender_id": "default",
        "conversation_id": "default",
        "slots": {"bot": bot, slot: 'Mumbai', 'requested_slot': slot},
        "latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        "latest_event_time": 1537645578.314389,
        "followup_action": "action_listen",
        "paused": False,
        "events": [{"event1": "hello"}, {"event2": "how are you"}],
        "latest_input_channel": "rest",
        "active_loop": {},
        "latest_action": {},
    }
    domain = {
        "config": {},
        "session_config": {},
        "intents": [],
        "entities": [],
        "slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
        "responses": {},
        "actions": [],
        "forms": {},
        "e2e_actions": [],
    }
    payload = {"next_action": action_name, "tracker": tracker, "domain": domain, "version": "version"}
    raw = self.fetch("/webhook", method="POST", body=json.dumps(payload).encode('utf-8'))
    result = json.loads(raw.body.decode("utf8"))
    self.assertEqual(raw.code, 200)
    self.assertEqual(result,
                     {'events': [{'event': 'slot', 'timestamp': None, 'name': 'location', 'value': 'Mumbai'}],
                      'responses': []})
def test_form_validation_action_valid_slot_value_with_utterance(self):
    """Accepting a valid slot value also dispatches the configured success utterance."""
    bot = '5f50fd0a56b698ca10d35d2e'
    user = 'test_user'
    slot = 'user_id'
    action_name = "validate_user"
    semantic_expression = {
        'and': [
            {'and': [{'operator': 'is_an_email_address'},
                     {'operator': 'is_not_null_or_empty'},
                     {'operator': 'endswith', 'value': '.com'},
                     ]},
            {'or': [{'operator': 'has_length_greater_than', 'value': 4},
                    {'operator': 'has_no_whitespace'},
                    ]},
        ]
    }
    Actions(name=action_name, type=ActionType.form_validation_action.value, bot=bot, user=user).save()
    # Two validations exist; only the one for the requested slot should fire.
    FormValidations(name=action_name, slot='location', validation_semantic=semantic_expression,
                    bot=bot, user=user).save()
    FormValidations(name=action_name, slot=slot, validation_semantic=semantic_expression,
                    bot=bot, user=user,
                    utter_msg_on_valid='that is great!').save()
    Slots(name=slot, type='text', bot=bot, user=user).save()
    tracker = {
        "sender_id": "default",
        "conversation_id": "default",
        "slots": {"bot": bot, slot: 'pandey.udit867@gmail.com', 'requested_slot': slot},
        "latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        "latest_event_time": 1537645578.314389,
        "followup_action": "action_listen",
        "paused": False,
        "events": [{"event1": "hello"}, {"event2": "how are you"}],
        "latest_input_channel": "rest",
        "active_loop": {},
        "latest_action": {},
    }
    domain = {
        "config": {},
        "session_config": {},
        "intents": [],
        "entities": [],
        "slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
        "responses": {},
        "actions": [],
        "forms": {},
        "e2e_actions": [],
    }
    payload = {"next_action": action_name, "tracker": tracker, "domain": domain, "version": "version"}
    raw = self.fetch("/webhook", method="POST", body=json.dumps(payload).encode('utf-8'))
    result = json.loads(raw.body.decode("utf8"))
    self.assertEqual(raw.code, 200)
    self.assertEqual(result, {
        'events': [{'event': 'slot', 'timestamp': None, 'name': 'user_id', 'value': 'pandey.udit867@gmail.com'}],
        'responses': [
            {'text': 'that is great!', 'buttons': [], 'elements': [], 'custom': {}, 'template': None, 'image': None,
             'attachment': None}]})
def test_form_validation_action_invalid_slot_value(self):
    """A failing validation resets the requested slot to None (no utterance configured)."""
    bot = '5f50fd0a56b698ca10d35d2e'
    user = 'test_user'
    slot = 'current_location'
    action_name = "validate_form_with_3_validations"
    semantic_expression = {
        'and': [
            {'and': [{'operator': 'in', 'value': ['Mumbai', 'Bangalore']},
                     {'operator': 'startswith', 'value': 'M'},
                     {'operator': 'endswith', 'value': 'i'},
                     ]},
            {'or': [{'operator': 'has_length_greater_than', 'value': 20},
                    {'operator': 'has_no_whitespace'},
                    {'operator': 'matches_regex', 'value': '^[e]+.*[e]$'}]},
        ]
    }
    Actions(name=action_name, type=ActionType.form_validation_action.value, bot=bot, user=user).save()
    # Three validations on the same action; only the requested slot's applies.
    FormValidations(name=action_name, slot='name', validation_semantic=semantic_expression,
                    bot=bot, user=user).save().to_mongo().to_dict()
    FormValidations(name=action_name, slot='user_id', validation_semantic=semantic_expression,
                    bot=bot, user=user, utter_msg_on_valid='that is great!').save().to_mongo().to_dict()
    FormValidations(name=action_name, slot=slot, validation_semantic=semantic_expression,
                    bot=bot, user=user).save().to_mongo().to_dict()
    Slots(name=slot, type='text', bot=bot, user=user).save()
    tracker = {
        "sender_id": "default",
        "conversation_id": "default",
        "slots": {"bot": bot, slot: 'Delhi', 'requested_slot': slot},
        "latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        "latest_event_time": 1537645578.314389,
        "followup_action": "action_listen",
        "paused": False,
        "events": [{"event1": "hello"}, {"event2": "how are you"}],
        "latest_input_channel": "rest",
        "active_loop": {},
        "latest_action": {},
    }
    domain = {
        "config": {},
        "session_config": {},
        "intents": [],
        "entities": [],
        "slots": {"bot": "5f50fd0a56b698ca10d35d2e", "current_location": None},
        "responses": {},
        "actions": [],
        "forms": {},
        "e2e_actions": [],
    }
    payload = {"next_action": action_name, "tracker": tracker, "domain": domain, "version": "version"}
    raw = self.fetch("/webhook", method="POST", body=json.dumps(payload).encode('utf-8'))
    result = json.loads(raw.body.decode("utf8"))
    self.assertEqual(raw.code, 200)
    self.assertEqual(result,
                     {'events': [{'event': 'slot', 'timestamp': None, 'name': 'current_location', 'value': None}],
                      'responses': []})
def test_form_validation_action_invalid_slot_value_with_utterance(self):
    """A failing validation resets the slot and dispatches the failure utterance."""
    bot = '5f50fd0a56b698ca10d35d2e'
    user = 'test_user'
    slot = 'profession'
    action_name = "validate_form"
    semantic_expression = {
        'and': [
            {'and': [{'operator': 'is_not_null_or_empty'},
                     {'operator': 'endswith', 'value': '.com'},
                     ]},
            {'or': [{'operator': 'has_length_greater_than', 'value': 4},
                    {'operator': 'has_no_whitespace'},
                    ]},
        ]
    }
    Actions(name=action_name, type=ActionType.form_validation_action.value, bot=bot, user=user).save()
    FormValidations(name=action_name, slot='some_slot', validation_semantic=semantic_expression,
                    bot=bot, user=user).save().to_mongo().to_dict()
    FormValidations(name=action_name, slot=slot, validation_semantic=semantic_expression,
                    bot=bot, user=user, utter_msg_on_valid='that is great!',
                    utter_msg_on_invalid='Invalid value. Please type again!').save().to_mongo().to_dict()
    Slots(name=slot, type='text', bot=bot, user=user).save()
    tracker = {
        "sender_id": "default",
        "conversation_id": "default",
        "slots": {"bot": bot, slot: 'computer programmer', 'requested_slot': slot},
        "latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        "latest_event_time": 1537645578.314389,
        "followup_action": "action_listen",
        "paused": False,
        "events": [{"event1": "hello"}, {"event2": "how are you"}],
        "latest_input_channel": "rest",
        "active_loop": {},
        "latest_action": {},
    }
    domain = {
        "config": {},
        "session_config": {},
        "intents": [],
        "entities": [],
        "slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
        "responses": {},
        "actions": [],
        "forms": {},
        "e2e_actions": [],
    }
    payload = {"next_action": action_name, "tracker": tracker, "domain": domain, "version": "version"}
    raw = self.fetch("/webhook", method="POST", body=json.dumps(payload).encode('utf-8'))
    result = json.loads(raw.body.decode("utf8"))
    self.assertEqual(raw.code, 200)
    self.assertEqual(result, {
        'events': [{'event': 'slot', 'timestamp': None, 'name': 'profession', 'value': None}],
        'responses': [
            {'text': 'Invalid value. Please type again!', 'buttons': [], 'elements': [], 'custom': {},
             'template': None, 'image': None,
             'attachment': None}]})
def test_form_validation_action_no_validation_configured(self):
    """Without a validation for the requested slot, the value is accepted as-is.

    Exercised twice: first with no validations at all, then with validations
    configured only for *other* slots — both must accept the value.
    """
    bot = '5f50fd0a56b698ca10d35d2e'
    user = 'test_user'
    slot = 'age'
    action_name = "validate_user_details"
    Actions(name=action_name, type=ActionType.form_validation_action.value, bot=bot, user=user).save()
    tracker = {
        "sender_id": "default",
        "conversation_id": "default",
        "slots": {"bot": bot, slot: 10, 'requested_slot': slot},
        "latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        "latest_event_time": 1537645578.314389,
        "followup_action": "action_listen",
        "paused": False,
        "events": [{"event1": "hello"}, {"event2": "how are you"}],
        "latest_input_channel": "rest",
        "active_loop": {},
        "latest_action": {},
    }
    domain = {
        "config": {},
        "session_config": {},
        "intents": [],
        "entities": [],
        "slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
        "responses": {},
        "actions": [],
        "forms": {},
        "e2e_actions": [],
    }
    payload = {"next_action": action_name, "tracker": tracker, "domain": domain, "version": "version"}

    def post_and_verify():
        raw = self.fetch("/webhook", method="POST", body=json.dumps(payload).encode('utf-8'))
        result = json.loads(raw.body.decode("utf8"))
        self.assertEqual(raw.code, 200)
        self.assertEqual(result, {
            'events': [{'event': 'slot', 'timestamp': None, 'name': 'age', 'value': 10}],
            'responses': []})

    post_and_verify()
    # NOTE(review): 'ends_with' below differs from the 'endswith' operator
    # used by sibling tests; kept verbatim since these rules target other
    # slots and are never evaluated here — confirm intended spelling.
    semantic_expression = {
        'and': [
            {'and': [{'operator': 'is_not_null_or_empty'},
                     {'operator': 'ends_with', 'value': '.com'},
                     ]},
            {'or': [{'operator': 'has_length_greater_than', 'value': 4},
                    {'operator': 'has_no_whitespace'},
                    ]},
        ]
    }
    FormValidations(name=action_name, slot='name', validation_semantic=semantic_expression,
                    bot=bot, user=user, utter_msg_on_valid='that is great!').save()
    FormValidations(name=action_name, slot='occupation', validation_semantic=semantic_expression,
                    bot=bot, user=user, utter_msg_on_valid='that is great!').save()
    post_and_verify()
def test_form_validation_action_slot_type_not_found(self):
    """If the slot has no Slots definition (no type), the action yields nothing.

    A validation exists for the requested slot, but no ``Slots`` document is
    saved, so the webhook answers 200 with empty events and responses.
    """
    bot = '5f50fd0a56b698ca10d35d2e'
    user = 'test_user'
    slot = 'reservation_id'
    action_name = "validate_hotel_booking"
    Actions(name=action_name, type=ActionType.form_validation_action.value, bot=bot, user=user).save()
    FormValidations(name=action_name, slot=slot, validation_semantic={},
                    bot=bot, user=user, utter_msg_on_valid='that is great!',
                    utter_msg_on_invalid='Invalid value. Please type again!').save()
    tracker = {
        "sender_id": "default",
        "conversation_id": "default",
        "slots": {"bot": bot, slot: '10974872t49', 'requested_slot': slot},
        "latest_message": {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        "latest_event_time": 1537645578.314389,
        "followup_action": "action_listen",
        "paused": False,
        "events": [{"event1": "hello"}, {"event2": "how are you"}],
        "latest_input_channel": "rest",
        "active_loop": {},
        "latest_action": {},
    }
    domain = {
        "config": {},
        "session_config": {},
        "intents": [],
        "entities": [],
        "slots": {"bot": "5f50fd0a56b698ca10d35d2e", "location": None},
        "responses": {},
        "actions": [],
        "forms": {},
        "e2e_actions": [],
    }
    payload = {"next_action": action_name, "tracker": tracker, "domain": domain, "version": "version"}
    raw = self.fetch("/webhook", method="POST", body=json.dumps(payload).encode('utf-8'))
    result = json.loads(raw.body.decode("utf8"))
    self.assertEqual(raw.code, 200)
    self.assertEqual(result, {'events': [], 'responses': []})
| 46.072944
| 120
| 0.499266
| 2,988
| 34,739
| 5.573963
| 0.082999
| 0.031822
| 0.051096
| 0.017652
| 0.907175
| 0.901471
| 0.872651
| 0.860282
| 0.84323
| 0.829661
| 0
| 0.038141
| 0.34716
| 34,739
| 753
| 121
| 46.13413
| 0.696239
| 0
| 0
| 0.768908
| 0
| 0.002801
| 0.265321
| 0.034543
| 0
| 0
| 0
| 0
| 0.065826
| 1
| 0.030812
| false
| 0
| 0.018207
| 0.008403
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16b6045ec86c3ca1ecd4c5c4220fbb69e0fbd130
| 11,159
|
py
|
Python
|
tests/units/fastsync/commons/test_fastsync_tap_postgres.py
|
EwayJSC/pipelinewise
|
2d907a2b78fc5a491ce437217f5b64b0ef87964b
|
[
"Apache-2.0"
] | null | null | null |
tests/units/fastsync/commons/test_fastsync_tap_postgres.py
|
EwayJSC/pipelinewise
|
2d907a2b78fc5a491ce437217f5b64b0ef87964b
|
[
"Apache-2.0"
] | 37
|
2021-06-07T07:12:23.000Z
|
2022-03-28T23:08:04.000Z
|
tests/units/fastsync/commons/test_fastsync_tap_postgres.py
|
EwayJSC/pipelinewise
|
2d907a2b78fc5a491ce437217f5b64b0ef87964b
|
[
"Apache-2.0"
] | 1
|
2020-08-03T06:53:35.000Z
|
2020-08-03T06:53:35.000Z
|
from unittest import TestCase
from unittest.mock import MagicMock, Mock, PropertyMock, patch
from pipelinewise.fastsync.commons.tap_postgres import FastSyncTapPostgres
class TestFastSyncTapPostgres(TestCase):
    """
    Unit tests for fastsync tap postgres
    """

    def setUp(self) -> None:
        """Initialise test FastSyncTapPostgres object"""
        self.postgres = FastSyncTapPostgres(connection_config={'dbname': 'test_database',
                                                               'tap_id': 'test_tap'},
                                            tap_type_to_target_type={})
        # Collect every query sent to the primary host so individual tests
        # can assert on the exact SQL issued.
        self.postgres.executed_queries_primary_host = []
        self.postgres.executed_queries = []

        def primary_host_query_mock(query, _=None):
            # Replacement for primary_host_query: records instead of executing.
            self.postgres.executed_queries_primary_host.append(query)

        self.postgres.primary_host_query = primary_host_query_mock

    def test_generate_repl_slot_name(self):
        """Validate if the replication slot name generated correctly"""
        # Provide only database name
        assert self.postgres.generate_replication_slot_name('some_db') == 'pipelinewise_some_db'
        # Provide database name and tap_id
        assert self.postgres.generate_replication_slot_name('some_db',
                                                            'some_tap') == 'pipelinewise_some_db_some_tap'
        # Provide database name, tap_id and prefix
        assert self.postgres.generate_replication_slot_name('some_db',
                                                            'some_tap',
                                                            prefix='custom_prefix') == 'custom_prefix_some_db_some_tap'
        # Replication slot name should be lowercase
        assert self.postgres.generate_replication_slot_name('SoMe_DB',
                                                            'SoMe_TaP') == 'pipelinewise_some_db_some_tap'
        # Invalid characters should be replaced by underscores
        assert self.postgres.generate_replication_slot_name('some-db',
                                                            'some-tap') == 'pipelinewise_some_db_some_tap'
        assert self.postgres.generate_replication_slot_name('some.db',
                                                            'some.tap') == 'pipelinewise_some_db_some_tap'

    def test_create_replication_slot_1(self):
        """
        Validate if replication slot creation SQL commands generated correctly in case no v15 slots exists
        """

        def execute_mock(query):
            # Stand-in for cursor.execute(): records the SQL instead of running it.
            print('Mocked execute called')
            self.postgres.executed_queries_primary_host.append(query)

        # mock cursor with execute method
        cursor_mock = MagicMock().return_value
        cursor_mock.__enter__.return_value.execute.side_effect = execute_mock
        # rowcount == 0 -> the lookup for the old-style (v15) slot name finds
        # nothing, so a slot with the new tap-suffixed name must be created.
        type(cursor_mock.__enter__.return_value).rowcount = PropertyMock(return_value=0)

        # mock PG connection instance with ability to open cursor
        pg_con = Mock()
        pg_con.cursor.return_value = cursor_mock

        self.postgres.primary_host_conn = pg_con

        self.postgres.create_replication_slot()

        # First the existence check, then the creation with the v16-style name.
        assert self.postgres.executed_queries_primary_host == [
            "SELECT * FROM pg_replication_slots WHERE slot_name = 'pipelinewise_test_database';",
            "SELECT * FROM pg_create_logical_replication_slot('pipelinewise_test_database_test_tap', 'wal2json')"
        ]

    def test_create_replication_slot_2(self):
        """
        Validate if replication slot creation SQL commands generated correctly in case a v15 slots exists
        """

        def execute_mock(query):
            # Stand-in for cursor.execute(): records the SQL instead of running it.
            print('Mocked execute called')
            self.postgres.executed_queries_primary_host.append(query)

        # mock cursor with execute method
        cursor_mock = MagicMock().return_value
        cursor_mock.__enter__.return_value.execute.side_effect = execute_mock
        # rowcount == 1 -> an old-style (v15) slot exists, so the legacy
        # (non-suffixed) slot name is reused on creation.
        type(cursor_mock.__enter__.return_value).rowcount = PropertyMock(return_value=1)

        # mock PG connection instance with ability to open cursor
        pg_con = Mock()
        pg_con.cursor.return_value = cursor_mock

        self.postgres.primary_host_conn = pg_con

        self.postgres.create_replication_slot()

        assert self.postgres.executed_queries_primary_host == [
            "SELECT * FROM pg_replication_slots WHERE slot_name = 'pipelinewise_test_database';",
            "SELECT * FROM pg_create_logical_replication_slot('pipelinewise_test_database', 'wal2json')"
        ]

    @patch('pipelinewise.fastsync.commons.tap_postgres.psycopg2.connect')
    def test_get_connection_to_primary(self, connect_mock):
        """
        Check that get connection uses the right credentials to connect to primary
        """
        creds = {
            'host': 'my_primary_host',
            'user': 'my_primary_user',
            'password': 'my_primary_user',
            'dbname': 'my_db',
            'port': 'my_primary_port',
        }

        self.assertEqual(FastSyncTapPostgres.get_connection(creds, prioritize_primary=True),
                         connect_mock.return_value)

        connect_mock.assert_called_once_with(
            f"host='{creds['host']}' port='{creds['port']}' user='{creds['user']}' password='{creds['password']}' "
            f"dbname='{creds['dbname']}'")

        # NOTE(review): `connect_mock.autocommit` is an auto-created Mock
        # attribute and therefore always truthy — this assertion cannot fail.
        # Presumably it was meant to check the returned connection's
        # autocommit flag; confirm and tighten.
        self.assertTrue(connect_mock.autocommit)

    @patch('pipelinewise.fastsync.commons.tap_postgres.psycopg2.connect')
    def test_get_connection_to_sec(self, connect_mock):
        """
        Check that get connection uses the right credentials to connect to secondary if present
        """
        creds = {
            'host': 'my_primary_host',
            'replica_host': 'my_replica_host',
            'user': 'my_primary_user',
            'replica_user': 'my_replica_user',
            'password': 'my_primary_user',
            'replica_password': 'my_replica_user',
            'dbname': 'my_db',
            'port': 'my_primary_port',
            'replica_port': 'my_replica_port',
        }

        self.assertEqual(FastSyncTapPostgres.get_connection(creds, prioritize_primary=False),
                         connect_mock.return_value)

        # With prioritize_primary=False the replica_* credentials win.
        connect_mock.assert_called_once_with(
            f"host='{creds['replica_host']}' port='{creds['replica_port']}' user='{creds['replica_user']}' password"
            f"='{creds['replica_password']}' "
            f"dbname='{creds['dbname']}'")

        # NOTE(review): always-truthy Mock attribute — see
        # test_get_connection_to_primary.
        self.assertTrue(connect_mock.autocommit)

    @patch('pipelinewise.fastsync.commons.tap_postgres.psycopg2.connect')
    def test_get_connection_fallback(self, connect_mock):
        """
        Check that get connection uses the primary server credentials as a fallback
        """
        creds = {
            'host': 'my_primary_host',
            'replica_host': 'my_replica_host',
            'user': 'my_primary_user',
            'password': 'my_primary_user',
            'dbname': 'my_db',
            'port': 'my_primary_port',
        }

        self.assertEqual(FastSyncTapPostgres.get_connection(creds, prioritize_primary=False),
                         connect_mock.return_value)

        # Only replica_host is given: the remaining fields fall back to the
        # primary credentials.
        connect_mock.assert_called_once_with(
            f"host='{creds['replica_host']}' port='{creds['port']}' user='{creds['user']}' password"
            f"='{creds['password']}' dbname='{creds['dbname']}'")

        # NOTE(review): always-truthy Mock attribute — see
        # test_get_connection_to_primary.
        self.assertTrue(connect_mock.autocommit)

    @patch('pipelinewise.fastsync.commons.tap_postgres.psycopg2.connect')
    def test_get_connection_ssl(self, connect_mock):
        """
        Check that get connection uses ssl when present
        """
        creds = {
            'host': 'my_primary_host',
            'user': 'my_primary_user',
            'password': 'my_primary_user',
            'dbname': 'my_db',
            'port': 'my_primary_port',
            'ssl': 'true'
        }

        self.assertEqual(FastSyncTapPostgres.get_connection(creds, prioritize_primary=False),
                         connect_mock.return_value)

        # The 'ssl' flag must append sslmode='require' to the DSN.
        connect_mock.assert_called_once_with(
            f"host='{creds['host']}' port='{creds['port']}' user='{creds['user']}' password"
            f"='{creds['password']}' dbname='{creds['dbname']}' sslmode='require'")

        # NOTE(review): always-truthy Mock attribute — see
        # test_get_connection_to_primary.
        self.assertTrue(connect_mock.autocommit)

    @patch('pipelinewise.fastsync.commons.tap_postgres.psycopg2.connect')
    def test_drop_slot_v15(self, connect_mock):
        """
        Check that dropping slots works fine for v15 slots
        """

        def execute_mock(query):
            # Stand-in for cursor.execute(): records the SQL instead of running it.
            print('Mocked execute called')
            self.postgres.executed_queries_primary_host.append(query)

        creds = {
            'host': 'my_primary_host',
            'user': 'my_primary_user',
            'password': 'my_primary_user',
            'dbname': 'my_db',
            'port': 'my_primary_port',
            'ssl': 'true',
            'tap_id': 'tap_test'
        }

        # mock cursor with execute method
        cursor_mock = MagicMock().return_value
        cursor_mock.__enter__.return_value.execute.side_effect = execute_mock
        # First rowcount (1) -> a v15-style slot exists; the drop therefore
        # targets the non-suffixed slot name.
        type(cursor_mock.__enter__.return_value).rowcount = PropertyMock(side_effect=[1, 2])

        # mock PG connection instance with ability to open cursor
        pg_con = Mock()
        pg_con.cursor.return_value = cursor_mock
        connect_mock.return_value = pg_con

        self.postgres.drop_slot(creds)

        assert self.postgres.executed_queries_primary_host == [
            "SELECT * FROM pg_replication_slots WHERE slot_name = 'pipelinewise_my_db';",
            'SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots WHERE '
            "slot_name = 'pipelinewise_my_db';",
        ]

    @patch('pipelinewise.fastsync.commons.tap_postgres.psycopg2.connect')
    def test_drop_slot_v16(self, connect_mock):
        """
        Check that dropping slots works fine for v16 slots
        """

        def execute_mock(query):
            # Stand-in for cursor.execute(): records the SQL instead of running it.
            print('Mocked execute called')
            self.postgres.executed_queries_primary_host.append(query)

        creds = {
            'host': 'my_primary_host',
            'user': 'my_primary_user',
            'password': 'my_primary_user',
            'dbname': 'my_db',
            'port': 'my_primary_port',
            'ssl': 'true',
            'tap_id': 'tap_test'
        }

        # mock cursor with execute method
        cursor_mock = MagicMock().return_value
        cursor_mock.__enter__.return_value.execute.side_effect = execute_mock
        # First rowcount (0) -> no v15-style slot; the drop targets the
        # tap-suffixed (v16-style) slot name instead.
        type(cursor_mock.__enter__.return_value).rowcount = PropertyMock(side_effect=[0, 1])

        # mock PG connection instance with ability to open cursor
        pg_con = Mock()
        pg_con.cursor.return_value = cursor_mock
        connect_mock.return_value = pg_con

        self.postgres.drop_slot(creds)

        assert self.postgres.executed_queries_primary_host == [
            "SELECT * FROM pg_replication_slots WHERE slot_name = 'pipelinewise_my_db';",
            'SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots WHERE '
            "slot_name = 'pipelinewise_my_db_tap_test';",
        ]
| 41.025735
| 119
| 0.623712
| 1,210
| 11,159
| 5.406612
| 0.115702
| 0.045858
| 0.023846
| 0.045399
| 0.845613
| 0.827423
| 0.819474
| 0.819474
| 0.800978
| 0.786457
| 0
| 0.003468
| 0.276458
| 11,159
| 271
| 120
| 41.177122
| 0.806787
| 0.113989
| 0
| 0.651429
| 0
| 0.005714
| 0.284946
| 0.140921
| 0
| 0
| 0
| 0
| 0.125714
| 1
| 0.085714
| false
| 0.08
| 0.017143
| 0
| 0.108571
| 0.022857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
16dfcfb7ef083b5ac387cb144c7b59510ae21c64
| 4,850
|
py
|
Python
|
axelrod/tests/strategies/test_averagecopier.py
|
danilobellini/Axelrod
|
2c9212553e06095c24adcb82a5979279cbdf45fb
|
[
"MIT"
] | null | null | null |
axelrod/tests/strategies/test_averagecopier.py
|
danilobellini/Axelrod
|
2c9212553e06095c24adcb82a5979279cbdf45fb
|
[
"MIT"
] | 1
|
2019-01-22T09:59:52.000Z
|
2019-01-22T09:59:52.000Z
|
axelrod/tests/strategies/test_averagecopier.py
|
danilobellini/Axelrod
|
2c9212553e06095c24adcb82a5979279cbdf45fb
|
[
"MIT"
] | null | null | null |
"""Tests for the AverageCopier strategies."""
import axelrod
from .test_player import TestPlayer
# Shorthand aliases for the two possible game actions.
C = axelrod.Action.C
D = axelrod.Action.D
class TestAverageCopier(TestPlayer):
    """Tests for the AverageCopier strategy."""

    name = "Average Copier"
    player = axelrod.AverageCopier
    expected_classifier = {
        "memory_depth": float("inf"),  # Long memory
        "stochastic": True,
        "makes_use_of": set(),
        "long_run_time": False,
        "inspects_source": False,
        "manipulates_source": False,
        "manipulates_state": False,
    }

    def test_strategy(self):
        def plays(encoded):
            """Decode space-separated 'XY' pairs into (own, opponent) action tuples."""
            symbol = {"C": C, "D": D}
            return [(symbol[own], symbol[opp]) for own, opp in encoded.split()]

        # Against an all-cooperating opponent the copier settles on C.
        self.versus_test(axelrod.Cooperator(), expected_actions=[(C, C)] * 10, seed=1)
        self.versus_test(axelrod.Cooperator(),
                         expected_actions=plays("DC") + [(C, C)] * 9, seed=2)
        # Against an all-defecting opponent the copier settles on D.
        self.versus_test(axelrod.Defector(),
                         expected_actions=plays("CD") + [(D, D)] * 9, seed=1)
        self.versus_test(axelrod.Defector(),
                         expected_actions=plays("DD") + [(D, D)] * 9, seed=2)
        # Mixed histories: behaviour is stochastic and seed-dependent.
        self.versus_test(axelrod.Alternator(),
                         expected_actions=plays("CC CD DC DD CC CD CC DD DC CD"),
                         seed=1)
        self.versus_test(axelrod.Alternator(),
                         expected_actions=plays("DC CD DC CD CC DD DC DD CC DD"),
                         seed=2)
        self.versus_test(axelrod.MockPlayer(actions=[C, C, D, D, D, D]),
                         expected_actions=plays("CC CC CD DD DD CD DC DC DD DD"),
                         seed=1)
        self.versus_test(axelrod.MockPlayer(actions=[C, C, C, D, D, D]),
                         expected_actions=plays("DC CC CC CD DD CD CC DC DC DD"),
                         seed=2)
class TestNiceAverageCopier(TestPlayer):
    """Tests for the NiceAverageCopier strategy (never defects first)."""

    name = "Nice Average Copier"
    player = axelrod.NiceAverageCopier
    expected_classifier = {
        "memory_depth": float("inf"),  # Long memory
        "stochastic": True,
        "makes_use_of": set(),
        "long_run_time": False,
        "inspects_source": False,
        "manipulates_source": False,
        "manipulates_state": False,
    }

    def test_strategy(self):
        def plays(encoded):
            """Decode space-separated 'XY' pairs into (own, opponent) action tuples."""
            symbol = {"C": C, "D": D}
            return [(symbol[own], symbol[opp]) for own, opp in encoded.split()]

        # Against an all-cooperating opponent the copier settles on C.
        self.versus_test(axelrod.Cooperator(), expected_actions=[(C, C)] * 10, seed=1)
        # Against an all-defecting opponent the copier settles on D.
        self.versus_test(axelrod.Defector(),
                         expected_actions=plays("CD") + [(D, D)] * 9, seed=1)
        # Mixed histories: behaviour is stochastic and seed-dependent.
        self.versus_test(axelrod.Alternator(),
                         expected_actions=plays("CC CD CC DD DC CD CC CD DC DD"),
                         seed=1)
        self.versus_test(axelrod.Alternator(),
                         expected_actions=plays("CC CD DC DD CC CD DC DD DC CD"),
                         seed=2)
        self.versus_test(axelrod.MockPlayer(actions=[C, C, D, D, D, D]),
                         expected_actions=plays("CC CC CD CD DD DD CC DC CD DD"),
                         seed=1)
        self.versus_test(axelrod.MockPlayer(actions=[C, C, C, D, D, D]),
                         expected_actions=plays("CC CC CC CD DD DD CC CC DC DD"),
                         seed=2)
| 27.094972
| 80
| 0.447629
| 542
| 4,850
| 3.915129
| 0.129151
| 0.053723
| 0.040999
| 0.032045
| 0.868992
| 0.862394
| 0.861923
| 0.852969
| 0.805844
| 0.764373
| 0
| 0.007639
| 0.406186
| 4,850
| 178
| 81
| 27.247191
| 0.729167
| 0.089897
| 0
| 0.845638
| 0
| 0
| 0.052943
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013423
| false
| 0
| 0.013423
| 0
| 0.080537
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc76da099d56a4ce6e105216336ed28731a827f4
| 83
|
py
|
Python
|
tests/parser/choice.7.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/choice.7.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/choice.7.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
a | b.
c | d :- not a.
"""
output = """
a | b.
c | d :- not a.
"""
| 9.222222
| 16
| 0.325301
| 14
| 83
| 1.928571
| 0.5
| 0.148148
| 0.222222
| 0.296296
| 0.592593
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0.361446
| 83
| 8
| 17
| 10.375
| 0.509434
| 0
| 0
| 0.75
| 0
| 0
| 0.607595
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc8363c6f3f2b366b917650c170211d424a23bbe
| 3,222
|
py
|
Python
|
logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py
|
jo2y/google-cloud-python
|
1b76727be16bc4335276f793340bb72d32be7166
|
[
"Apache-2.0"
] | 1
|
2021-06-30T11:43:47.000Z
|
2021-06-30T11:43:47.000Z
|
logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py
|
jo2y/google-cloud-python
|
1b76727be16bc4335276f793340bb72d32be7166
|
[
"Apache-2.0"
] | 1
|
2021-06-25T15:16:57.000Z
|
2021-06-25T15:16:57.000Z
|
logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py
|
jo2y/google-cloud-python
|
1b76727be16bc4335276f793340bb72d32be7166
|
[
"Apache-2.0"
] | 1
|
2021-06-30T11:44:03.000Z
|
2021-06-30T11:44:03.000Z
|
# GAPIC client configuration for google.logging.v2.ConfigServiceV2.
# Retry/timeout parameters are identical for every RPC except the
# per-method timeout, retry-code class and the write_sink total timeout,
# so those variations are generated from compact spec tuples below.
config = {
    "interfaces": {
        "google.logging.v2.ConfigServiceV2": {
            "retry_codes": {
                "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
                "non_idempotent": [],
            },
            # Shared backoff settings; only the total timeout differs.
            "retry_params": {
                params_name: {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.2,
                    "max_retry_delay_millis": 1000,
                    "initial_rpc_timeout_millis": 30000,
                    "rpc_timeout_multiplier": 1.5,
                    "max_rpc_timeout_millis": 60000,
                    "total_timeout_millis": total_timeout,
                }
                for params_name, total_timeout in (
                    ("default", 90000),
                    ("write_sink", 120000),
                )
            },
            # (method name, per-call timeout ms, retry-code class)
            "methods": {
                method: {
                    "timeout_millis": timeout,
                    "retry_codes_name": codes,
                    "retry_params_name": "default",
                }
                for method, timeout, codes in (
                    ("ListSinks", 60000, "idempotent"),
                    ("GetSink", 60000, "idempotent"),
                    ("CreateSink", 120000, "non_idempotent"),
                    ("UpdateSink", 120000, "non_idempotent"),
                    ("DeleteSink", 60000, "idempotent"),
                    ("ListExclusions", 60000, "idempotent"),
                    ("GetExclusion", 60000, "idempotent"),
                    ("CreateExclusion", 60000, "non_idempotent"),
                    ("UpdateExclusion", 60000, "non_idempotent"),
                    ("DeleteExclusion", 60000, "idempotent"),
                )
            },
        }
    }
}
| 38.819277
| 79
| 0.419305
| 213
| 3,222
| 5.896714
| 0.220657
| 0.165605
| 0.183917
| 0.199045
| 0.780255
| 0.780255
| 0.780255
| 0.780255
| 0.780255
| 0.780255
| 0
| 0.063653
| 0.478274
| 3,222
| 82
| 80
| 39.292683
| 0.683522
| 0
| 0
| 0.512195
| 0
| 0
| 0.385785
| 0.097145
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.